1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP
16 ; These patterns are produced by LoopVectorizer for interleaved loads.
18 define void @load_i64_stride6_vf2(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
19 ; SSE-LABEL: load_i64_stride6_vf2:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movaps 80(%rdi), %xmm0
23 ; SSE-NEXT: movaps 64(%rdi), %xmm1
24 ; SSE-NEXT: movaps (%rdi), %xmm2
25 ; SSE-NEXT: movaps 16(%rdi), %xmm3
26 ; SSE-NEXT: movaps 32(%rdi), %xmm4
27 ; SSE-NEXT: movaps 48(%rdi), %xmm5
28 ; SSE-NEXT: movaps %xmm2, %xmm6
29 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
30 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
31 ; SSE-NEXT: movaps %xmm3, %xmm5
32 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm1[0]
33 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
34 ; SSE-NEXT: movaps %xmm4, %xmm1
35 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
36 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
37 ; SSE-NEXT: movaps %xmm6, (%rsi)
38 ; SSE-NEXT: movaps %xmm2, (%rdx)
39 ; SSE-NEXT: movaps %xmm5, (%rcx)
40 ; SSE-NEXT: movaps %xmm3, (%r8)
41 ; SSE-NEXT: movaps %xmm1, (%r9)
42 ; SSE-NEXT: movaps %xmm4, (%rax)
45 ; AVX-LABEL: load_i64_stride6_vf2:
47 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
48 ; AVX-NEXT: vmovaps (%rdi), %xmm0
49 ; AVX-NEXT: vmovaps 16(%rdi), %xmm1
50 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
51 ; AVX-NEXT: vmovaps 48(%rdi), %xmm3
52 ; AVX-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
53 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
54 ; AVX-NEXT: vmovaps 64(%rdi), %xmm3
55 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
56 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
57 ; AVX-NEXT: vmovaps 80(%rdi), %xmm3
58 ; AVX-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
59 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
60 ; AVX-NEXT: vmovaps %xmm4, (%rsi)
61 ; AVX-NEXT: vmovaps %xmm0, (%rdx)
62 ; AVX-NEXT: vmovaps %xmm5, (%rcx)
63 ; AVX-NEXT: vmovaps %xmm1, (%r8)
64 ; AVX-NEXT: vmovaps %xmm6, (%r9)
65 ; AVX-NEXT: vmovaps %xmm2, (%rax)
68 ; AVX2-LABEL: load_i64_stride6_vf2:
70 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
71 ; AVX2-NEXT: vmovaps (%rdi), %xmm0
72 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm1
73 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm2
74 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm3
75 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
76 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
77 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm3
78 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
79 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
80 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm3
81 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
82 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
83 ; AVX2-NEXT: vmovaps %xmm4, (%rsi)
84 ; AVX2-NEXT: vmovaps %xmm0, (%rdx)
85 ; AVX2-NEXT: vmovaps %xmm5, (%rcx)
86 ; AVX2-NEXT: vmovaps %xmm1, (%r8)
87 ; AVX2-NEXT: vmovaps %xmm6, (%r9)
88 ; AVX2-NEXT: vmovaps %xmm2, (%rax)
91 ; AVX2-FP-LABEL: load_i64_stride6_vf2:
93 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
94 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm0
95 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm1
96 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm2
97 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm3
98 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
99 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
100 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm3
101 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
102 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
103 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm3
104 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
105 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
106 ; AVX2-FP-NEXT: vmovaps %xmm4, (%rsi)
107 ; AVX2-FP-NEXT: vmovaps %xmm0, (%rdx)
108 ; AVX2-FP-NEXT: vmovaps %xmm5, (%rcx)
109 ; AVX2-FP-NEXT: vmovaps %xmm1, (%r8)
110 ; AVX2-FP-NEXT: vmovaps %xmm6, (%r9)
111 ; AVX2-FP-NEXT: vmovaps %xmm2, (%rax)
114 ; AVX2-FCP-LABEL: load_i64_stride6_vf2:
116 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
117 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm0
118 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm1
119 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm2
120 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm3
121 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
122 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
123 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm3
124 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
125 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
126 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm3
127 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
128 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
129 ; AVX2-FCP-NEXT: vmovaps %xmm4, (%rsi)
130 ; AVX2-FCP-NEXT: vmovaps %xmm0, (%rdx)
131 ; AVX2-FCP-NEXT: vmovaps %xmm5, (%rcx)
132 ; AVX2-FCP-NEXT: vmovaps %xmm1, (%r8)
133 ; AVX2-FCP-NEXT: vmovaps %xmm6, (%r9)
134 ; AVX2-FCP-NEXT: vmovaps %xmm2, (%rax)
135 ; AVX2-FCP-NEXT: retq
137 ; AVX512-LABEL: load_i64_stride6_vf2:
139 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
140 ; AVX512-NEXT: vmovaps (%rdi), %xmm0
141 ; AVX512-NEXT: vmovaps 16(%rdi), %xmm1
142 ; AVX512-NEXT: vmovaps 32(%rdi), %xmm2
143 ; AVX512-NEXT: vmovaps 48(%rdi), %xmm3
144 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
145 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
146 ; AVX512-NEXT: vmovaps 64(%rdi), %xmm3
147 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
148 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
149 ; AVX512-NEXT: vmovaps 80(%rdi), %xmm3
150 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
151 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
152 ; AVX512-NEXT: vmovaps %xmm4, (%rsi)
153 ; AVX512-NEXT: vmovaps %xmm0, (%rdx)
154 ; AVX512-NEXT: vmovaps %xmm5, (%rcx)
155 ; AVX512-NEXT: vmovaps %xmm1, (%r8)
156 ; AVX512-NEXT: vmovaps %xmm6, (%r9)
157 ; AVX512-NEXT: vmovaps %xmm2, (%rax)
160 ; AVX512-FCP-LABEL: load_i64_stride6_vf2:
161 ; AVX512-FCP: # %bb.0:
162 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
163 ; AVX512-FCP-NEXT: vmovaps (%rdi), %xmm0
164 ; AVX512-FCP-NEXT: vmovaps 16(%rdi), %xmm1
165 ; AVX512-FCP-NEXT: vmovaps 32(%rdi), %xmm2
166 ; AVX512-FCP-NEXT: vmovaps 48(%rdi), %xmm3
167 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
168 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
169 ; AVX512-FCP-NEXT: vmovaps 64(%rdi), %xmm3
170 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
171 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
172 ; AVX512-FCP-NEXT: vmovaps 80(%rdi), %xmm3
173 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
174 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
175 ; AVX512-FCP-NEXT: vmovaps %xmm4, (%rsi)
176 ; AVX512-FCP-NEXT: vmovaps %xmm0, (%rdx)
177 ; AVX512-FCP-NEXT: vmovaps %xmm5, (%rcx)
178 ; AVX512-FCP-NEXT: vmovaps %xmm1, (%r8)
179 ; AVX512-FCP-NEXT: vmovaps %xmm6, (%r9)
180 ; AVX512-FCP-NEXT: vmovaps %xmm2, (%rax)
181 ; AVX512-FCP-NEXT: retq
183 ; AVX512DQ-LABEL: load_i64_stride6_vf2:
185 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
186 ; AVX512DQ-NEXT: vmovaps (%rdi), %xmm0
187 ; AVX512DQ-NEXT: vmovaps 16(%rdi), %xmm1
188 ; AVX512DQ-NEXT: vmovaps 32(%rdi), %xmm2
189 ; AVX512DQ-NEXT: vmovaps 48(%rdi), %xmm3
190 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
191 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
192 ; AVX512DQ-NEXT: vmovaps 64(%rdi), %xmm3
193 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
194 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
195 ; AVX512DQ-NEXT: vmovaps 80(%rdi), %xmm3
196 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
197 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
198 ; AVX512DQ-NEXT: vmovaps %xmm4, (%rsi)
199 ; AVX512DQ-NEXT: vmovaps %xmm0, (%rdx)
200 ; AVX512DQ-NEXT: vmovaps %xmm5, (%rcx)
201 ; AVX512DQ-NEXT: vmovaps %xmm1, (%r8)
202 ; AVX512DQ-NEXT: vmovaps %xmm6, (%r9)
203 ; AVX512DQ-NEXT: vmovaps %xmm2, (%rax)
204 ; AVX512DQ-NEXT: retq
206 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf2:
207 ; AVX512DQ-FCP: # %bb.0:
208 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
209 ; AVX512DQ-FCP-NEXT: vmovaps (%rdi), %xmm0
210 ; AVX512DQ-FCP-NEXT: vmovaps 16(%rdi), %xmm1
211 ; AVX512DQ-FCP-NEXT: vmovaps 32(%rdi), %xmm2
212 ; AVX512DQ-FCP-NEXT: vmovaps 48(%rdi), %xmm3
213 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
214 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
215 ; AVX512DQ-FCP-NEXT: vmovaps 64(%rdi), %xmm3
216 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
217 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
218 ; AVX512DQ-FCP-NEXT: vmovaps 80(%rdi), %xmm3
219 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
220 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
221 ; AVX512DQ-FCP-NEXT: vmovaps %xmm4, (%rsi)
222 ; AVX512DQ-FCP-NEXT: vmovaps %xmm0, (%rdx)
223 ; AVX512DQ-FCP-NEXT: vmovaps %xmm5, (%rcx)
224 ; AVX512DQ-FCP-NEXT: vmovaps %xmm1, (%r8)
225 ; AVX512DQ-FCP-NEXT: vmovaps %xmm6, (%r9)
226 ; AVX512DQ-FCP-NEXT: vmovaps %xmm2, (%rax)
227 ; AVX512DQ-FCP-NEXT: retq
229 ; AVX512BW-LABEL: load_i64_stride6_vf2:
231 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
232 ; AVX512BW-NEXT: vmovaps (%rdi), %xmm0
233 ; AVX512BW-NEXT: vmovaps 16(%rdi), %xmm1
234 ; AVX512BW-NEXT: vmovaps 32(%rdi), %xmm2
235 ; AVX512BW-NEXT: vmovaps 48(%rdi), %xmm3
236 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
237 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
238 ; AVX512BW-NEXT: vmovaps 64(%rdi), %xmm3
239 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
240 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
241 ; AVX512BW-NEXT: vmovaps 80(%rdi), %xmm3
242 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
243 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
244 ; AVX512BW-NEXT: vmovaps %xmm4, (%rsi)
245 ; AVX512BW-NEXT: vmovaps %xmm0, (%rdx)
246 ; AVX512BW-NEXT: vmovaps %xmm5, (%rcx)
247 ; AVX512BW-NEXT: vmovaps %xmm1, (%r8)
248 ; AVX512BW-NEXT: vmovaps %xmm6, (%r9)
249 ; AVX512BW-NEXT: vmovaps %xmm2, (%rax)
250 ; AVX512BW-NEXT: retq
252 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf2:
253 ; AVX512BW-FCP: # %bb.0:
254 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
255 ; AVX512BW-FCP-NEXT: vmovaps (%rdi), %xmm0
256 ; AVX512BW-FCP-NEXT: vmovaps 16(%rdi), %xmm1
257 ; AVX512BW-FCP-NEXT: vmovaps 32(%rdi), %xmm2
258 ; AVX512BW-FCP-NEXT: vmovaps 48(%rdi), %xmm3
259 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
260 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
261 ; AVX512BW-FCP-NEXT: vmovaps 64(%rdi), %xmm3
262 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
263 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
264 ; AVX512BW-FCP-NEXT: vmovaps 80(%rdi), %xmm3
265 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
266 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
267 ; AVX512BW-FCP-NEXT: vmovaps %xmm4, (%rsi)
268 ; AVX512BW-FCP-NEXT: vmovaps %xmm0, (%rdx)
269 ; AVX512BW-FCP-NEXT: vmovaps %xmm5, (%rcx)
270 ; AVX512BW-FCP-NEXT: vmovaps %xmm1, (%r8)
271 ; AVX512BW-FCP-NEXT: vmovaps %xmm6, (%r9)
272 ; AVX512BW-FCP-NEXT: vmovaps %xmm2, (%rax)
273 ; AVX512BW-FCP-NEXT: retq
275 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf2:
276 ; AVX512DQ-BW: # %bb.0:
277 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
278 ; AVX512DQ-BW-NEXT: vmovaps (%rdi), %xmm0
279 ; AVX512DQ-BW-NEXT: vmovaps 16(%rdi), %xmm1
280 ; AVX512DQ-BW-NEXT: vmovaps 32(%rdi), %xmm2
281 ; AVX512DQ-BW-NEXT: vmovaps 48(%rdi), %xmm3
282 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
283 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
284 ; AVX512DQ-BW-NEXT: vmovaps 64(%rdi), %xmm3
285 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
286 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
287 ; AVX512DQ-BW-NEXT: vmovaps 80(%rdi), %xmm3
288 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
289 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
290 ; AVX512DQ-BW-NEXT: vmovaps %xmm4, (%rsi)
291 ; AVX512DQ-BW-NEXT: vmovaps %xmm0, (%rdx)
292 ; AVX512DQ-BW-NEXT: vmovaps %xmm5, (%rcx)
293 ; AVX512DQ-BW-NEXT: vmovaps %xmm1, (%r8)
294 ; AVX512DQ-BW-NEXT: vmovaps %xmm6, (%r9)
295 ; AVX512DQ-BW-NEXT: vmovaps %xmm2, (%rax)
296 ; AVX512DQ-BW-NEXT: retq
298 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf2:
299 ; AVX512DQ-BW-FCP: # %bb.0:
300 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
301 ; AVX512DQ-BW-FCP-NEXT: vmovaps (%rdi), %xmm0
302 ; AVX512DQ-BW-FCP-NEXT: vmovaps 16(%rdi), %xmm1
303 ; AVX512DQ-BW-FCP-NEXT: vmovaps 32(%rdi), %xmm2
304 ; AVX512DQ-BW-FCP-NEXT: vmovaps 48(%rdi), %xmm3
305 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
306 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
307 ; AVX512DQ-BW-FCP-NEXT: vmovaps 64(%rdi), %xmm3
308 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
309 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
310 ; AVX512DQ-BW-FCP-NEXT: vmovaps 80(%rdi), %xmm3
311 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
312 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
313 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm4, (%rsi)
314 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm0, (%rdx)
315 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm5, (%rcx)
316 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm1, (%r8)
317 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm6, (%r9)
318 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm2, (%rax)
319 ; AVX512DQ-BW-FCP-NEXT: retq
320 %wide.vec = load <12 x i64>, ptr %in.vec, align 64
321 %strided.vec0 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 0, i32 6>
322 %strided.vec1 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 1, i32 7>
323 %strided.vec2 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 2, i32 8>
324 %strided.vec3 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 3, i32 9>
325 %strided.vec4 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 4, i32 10>
326 %strided.vec5 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 5, i32 11>
327 store <2 x i64> %strided.vec0, ptr %out.vec0, align 64
328 store <2 x i64> %strided.vec1, ptr %out.vec1, align 64
329 store <2 x i64> %strided.vec2, ptr %out.vec2, align 64
330 store <2 x i64> %strided.vec3, ptr %out.vec3, align 64
331 store <2 x i64> %strided.vec4, ptr %out.vec4, align 64
332 store <2 x i64> %strided.vec5, ptr %out.vec5, align 64
336 define void @load_i64_stride6_vf4(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
337 ; SSE-LABEL: load_i64_stride6_vf4:
339 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
340 ; SSE-NEXT: movaps 80(%rdi), %xmm3
341 ; SSE-NEXT: movaps 176(%rdi), %xmm5
342 ; SSE-NEXT: movaps 128(%rdi), %xmm0
343 ; SSE-NEXT: movaps 64(%rdi), %xmm6
344 ; SSE-NEXT: movaps 160(%rdi), %xmm7
345 ; SSE-NEXT: movaps 112(%rdi), %xmm1
346 ; SSE-NEXT: movaps (%rdi), %xmm8
347 ; SSE-NEXT: movaps 16(%rdi), %xmm4
348 ; SSE-NEXT: movaps 32(%rdi), %xmm2
349 ; SSE-NEXT: movaps 48(%rdi), %xmm9
350 ; SSE-NEXT: movaps 144(%rdi), %xmm10
351 ; SSE-NEXT: movaps 96(%rdi), %xmm11
352 ; SSE-NEXT: movaps %xmm11, %xmm12
353 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm10[0]
354 ; SSE-NEXT: movaps %xmm8, %xmm13
355 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm9[0]
356 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm10[1]
357 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
358 ; SSE-NEXT: movaps %xmm1, %xmm9
359 ; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm7[0]
360 ; SSE-NEXT: movaps %xmm4, %xmm10
361 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm6[0]
362 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm7[1]
363 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
364 ; SSE-NEXT: movaps %xmm0, %xmm6
365 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
366 ; SSE-NEXT: movaps %xmm2, %xmm7
367 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm3[0]
368 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm5[1]
369 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
370 ; SSE-NEXT: movaps %xmm13, (%rsi)
371 ; SSE-NEXT: movaps %xmm12, 16(%rsi)
372 ; SSE-NEXT: movaps %xmm8, (%rdx)
373 ; SSE-NEXT: movaps %xmm11, 16(%rdx)
374 ; SSE-NEXT: movaps %xmm10, (%rcx)
375 ; SSE-NEXT: movaps %xmm9, 16(%rcx)
376 ; SSE-NEXT: movaps %xmm4, (%r8)
377 ; SSE-NEXT: movaps %xmm1, 16(%r8)
378 ; SSE-NEXT: movaps %xmm7, (%r9)
379 ; SSE-NEXT: movaps %xmm6, 16(%r9)
380 ; SSE-NEXT: movaps %xmm2, (%rax)
381 ; SSE-NEXT: movaps %xmm0, 16(%rax)
384 ; AVX-LABEL: load_i64_stride6_vf4:
386 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
387 ; AVX-NEXT: vmovaps 160(%rdi), %ymm0
388 ; AVX-NEXT: vmovaps 96(%rdi), %ymm1
389 ; AVX-NEXT: vmovaps 128(%rdi), %ymm2
390 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm3
391 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
392 ; AVX-NEXT: vmovaps (%rdi), %xmm5
393 ; AVX-NEXT: vmovaps 16(%rdi), %xmm6
394 ; AVX-NEXT: vmovaps 32(%rdi), %xmm7
395 ; AVX-NEXT: vmovaps 48(%rdi), %xmm8
396 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm5[0],xmm8[0]
397 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm9[0,1,2,3],ymm4[4,5,6,7]
398 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
399 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm5[1],xmm8[1]
400 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
401 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm3
402 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm1[0],ymm3[0],ymm1[2],ymm3[2]
403 ; AVX-NEXT: vmovaps 64(%rdi), %xmm8
404 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm6[0],xmm8[0]
405 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
406 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
407 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm6[1],xmm8[1]
408 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
409 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm3
410 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
411 ; AVX-NEXT: vmovaps 80(%rdi), %xmm8
412 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm7[0],xmm8[0]
413 ; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
414 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
415 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm7[1],xmm8[1]
416 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
417 ; AVX-NEXT: vmovaps %ymm4, (%rsi)
418 ; AVX-NEXT: vmovaps %ymm2, (%rdx)
419 ; AVX-NEXT: vmovaps %ymm5, (%rcx)
420 ; AVX-NEXT: vmovaps %ymm1, (%r8)
421 ; AVX-NEXT: vmovaps %ymm6, (%r9)
422 ; AVX-NEXT: vmovaps %ymm0, (%rax)
423 ; AVX-NEXT: vzeroupper
426 ; AVX2-LABEL: load_i64_stride6_vf4:
428 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
429 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm0
430 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm1
431 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm2
432 ; AVX2-NEXT: vmovaps (%rdi), %xmm3
433 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm4
434 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm5
435 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm6
436 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
437 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
438 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
439 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
440 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm8
441 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
442 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
443 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
444 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm6
445 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
446 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm8
447 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
448 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
449 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
450 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
451 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
452 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
453 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm4
454 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
455 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
456 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
457 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
458 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm8
459 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
460 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
461 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
462 ; AVX2-NEXT: vmovaps %ymm7, (%rsi)
463 ; AVX2-NEXT: vmovaps %ymm3, (%rdx)
464 ; AVX2-NEXT: vmovaps %ymm6, (%rcx)
465 ; AVX2-NEXT: vmovaps %ymm2, (%r8)
466 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
467 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
468 ; AVX2-NEXT: vzeroupper
471 ; AVX2-FP-LABEL: load_i64_stride6_vf4:
473 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
474 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm0
475 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm1
476 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm2
477 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm3
478 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm4
479 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm5
480 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm6
481 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
482 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
483 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
484 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
485 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm8
486 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
487 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
488 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
489 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm6
490 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
491 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm8
492 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
493 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
494 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
495 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
496 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
497 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
498 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm4
499 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
500 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
501 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
502 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
503 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm8
504 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
505 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
506 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
507 ; AVX2-FP-NEXT: vmovaps %ymm7, (%rsi)
508 ; AVX2-FP-NEXT: vmovaps %ymm3, (%rdx)
509 ; AVX2-FP-NEXT: vmovaps %ymm6, (%rcx)
510 ; AVX2-FP-NEXT: vmovaps %ymm2, (%r8)
511 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
512 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
513 ; AVX2-FP-NEXT: vzeroupper
516 ; AVX2-FCP-LABEL: load_i64_stride6_vf4:
518 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
519 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm0
520 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm1
521 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm2
522 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm3
523 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm4
524 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm5
525 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm6
526 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
527 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
528 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
529 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
530 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm8
531 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
532 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
533 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
534 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm6
535 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
536 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm8
537 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
538 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
539 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
540 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
541 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
542 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
543 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm4
544 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
545 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
546 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
547 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
548 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm8
549 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
550 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
551 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
552 ; AVX2-FCP-NEXT: vmovaps %ymm7, (%rsi)
553 ; AVX2-FCP-NEXT: vmovaps %ymm3, (%rdx)
554 ; AVX2-FCP-NEXT: vmovaps %ymm6, (%rcx)
555 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
556 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
557 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
558 ; AVX2-FCP-NEXT: vzeroupper
559 ; AVX2-FCP-NEXT: retq
561 ; AVX512-LABEL: load_i64_stride6_vf4:
563 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
564 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm2
565 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm3
566 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
567 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
568 ; AVX512-NEXT: vpbroadcastq 144(%rdi), %ymm1
569 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
570 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
571 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
572 ; AVX512-NEXT: vmovdqa 128(%rdi), %ymm4
573 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
574 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
575 ; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
576 ; AVX512-NEXT: vmovdqa 160(%rdi), %xmm6
577 ; AVX512-NEXT: vpbroadcastq %xmm6, %ymm7
578 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
579 ; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
580 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
581 ; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
582 ; AVX512-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
583 ; AVX512-NEXT: vmovdqa 160(%rdi), %ymm7
584 ; AVX512-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
585 ; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
586 ; AVX512-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
587 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
588 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
589 ; AVX512-NEXT: vpbroadcastq 136(%rdi), %ymm8
590 ; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
591 ; AVX512-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
592 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
593 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
594 ; AVX512-NEXT: vmovdqa %ymm0, (%rsi)
595 ; AVX512-NEXT: vmovdqa %ymm1, (%rdx)
596 ; AVX512-NEXT: vmovdqa %ymm5, (%rcx)
597 ; AVX512-NEXT: vmovdqa %ymm6, (%r8)
598 ; AVX512-NEXT: vmovdqa %ymm4, (%r9)
599 ; AVX512-NEXT: vmovdqa %ymm2, (%rax)
600 ; AVX512-NEXT: vzeroupper
603 ; AVX512-FCP-LABEL: load_i64_stride6_vf4:
604 ; AVX512-FCP: # %bb.0:
605 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
606 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
607 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
608 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
609 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
610 ; AVX512-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
611 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
612 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
613 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
614 ; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
615 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
616 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
617 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
618 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
619 ; AVX512-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
620 ; AVX512-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
621 ; AVX512-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
622 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
623 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
624 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
625 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
626 ; AVX512-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
627 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
628 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
629 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
630 ; AVX512-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
631 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
632 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
633 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
634 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
635 ; AVX512-FCP-NEXT: vmovdqa %ymm0, (%rsi)
636 ; AVX512-FCP-NEXT: vmovdqa %ymm1, (%rdx)
637 ; AVX512-FCP-NEXT: vmovdqa %ymm6, (%rcx)
638 ; AVX512-FCP-NEXT: vmovdqa %ymm5, (%r8)
639 ; AVX512-FCP-NEXT: vmovdqa %ymm4, (%r9)
640 ; AVX512-FCP-NEXT: vmovdqa %ymm2, (%rax)
641 ; AVX512-FCP-NEXT: vzeroupper
642 ; AVX512-FCP-NEXT: retq
644 ; AVX512DQ-LABEL: load_i64_stride6_vf4:
646 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
647 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm2
648 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm3
649 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
650 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
651 ; AVX512DQ-NEXT: vpbroadcastq 144(%rdi), %ymm1
652 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
653 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
654 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
655 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %ymm4
656 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
657 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
658 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
659 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %xmm6
660 ; AVX512DQ-NEXT: vpbroadcastq %xmm6, %ymm7
661 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
662 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
663 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
664 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
665 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
666 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %ymm7
667 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
668 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
669 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
670 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
671 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
672 ; AVX512DQ-NEXT: vpbroadcastq 136(%rdi), %ymm8
673 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
674 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
675 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
676 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
677 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%rsi)
678 ; AVX512DQ-NEXT: vmovdqa %ymm1, (%rdx)
679 ; AVX512DQ-NEXT: vmovdqa %ymm5, (%rcx)
680 ; AVX512DQ-NEXT: vmovdqa %ymm6, (%r8)
681 ; AVX512DQ-NEXT: vmovdqa %ymm4, (%r9)
682 ; AVX512DQ-NEXT: vmovdqa %ymm2, (%rax)
683 ; AVX512DQ-NEXT: vzeroupper
684 ; AVX512DQ-NEXT: retq
686 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf4:
687 ; AVX512DQ-FCP: # %bb.0:
688 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
689 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
690 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
691 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
692 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
693 ; AVX512DQ-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
694 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
695 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
696 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
697 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
698 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
699 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
700 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
701 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
702 ; AVX512DQ-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
703 ; AVX512DQ-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
704 ; AVX512DQ-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
705 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
706 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
707 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
708 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
709 ; AVX512DQ-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
710 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
711 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
712 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
713 ; AVX512DQ-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
714 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
715 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
716 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
717 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
718 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm0, (%rsi)
719 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm1, (%rdx)
720 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm6, (%rcx)
721 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm5, (%r8)
722 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm4, (%r9)
723 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm2, (%rax)
724 ; AVX512DQ-FCP-NEXT: vzeroupper
725 ; AVX512DQ-FCP-NEXT: retq
727 ; AVX512BW-LABEL: load_i64_stride6_vf4:
729 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
730 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
731 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm3
732 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
733 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
734 ; AVX512BW-NEXT: vpbroadcastq 144(%rdi), %ymm1
735 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
736 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
737 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
738 ; AVX512BW-NEXT: vmovdqa 128(%rdi), %ymm4
739 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
740 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
741 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
742 ; AVX512BW-NEXT: vmovdqa 160(%rdi), %xmm6
743 ; AVX512BW-NEXT: vpbroadcastq %xmm6, %ymm7
744 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
745 ; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
746 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
747 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
748 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
749 ; AVX512BW-NEXT: vmovdqa 160(%rdi), %ymm7
750 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
751 ; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
752 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
753 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
754 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
755 ; AVX512BW-NEXT: vpbroadcastq 136(%rdi), %ymm8
756 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
757 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
758 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
759 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
760 ; AVX512BW-NEXT: vmovdqa %ymm0, (%rsi)
761 ; AVX512BW-NEXT: vmovdqa %ymm1, (%rdx)
762 ; AVX512BW-NEXT: vmovdqa %ymm5, (%rcx)
763 ; AVX512BW-NEXT: vmovdqa %ymm6, (%r8)
764 ; AVX512BW-NEXT: vmovdqa %ymm4, (%r9)
765 ; AVX512BW-NEXT: vmovdqa %ymm2, (%rax)
766 ; AVX512BW-NEXT: vzeroupper
767 ; AVX512BW-NEXT: retq
769 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf4:
770 ; AVX512BW-FCP: # %bb.0:
771 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
772 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
773 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
774 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
775 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
776 ; AVX512BW-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
777 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
778 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
779 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
780 ; AVX512BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
781 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
782 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
783 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
784 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
785 ; AVX512BW-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
786 ; AVX512BW-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
787 ; AVX512BW-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
788 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
789 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
790 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
791 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
792 ; AVX512BW-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
793 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
794 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
795 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
796 ; AVX512BW-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
797 ; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
798 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
799 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
800 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
801 ; AVX512BW-FCP-NEXT: vmovdqa %ymm0, (%rsi)
802 ; AVX512BW-FCP-NEXT: vmovdqa %ymm1, (%rdx)
803 ; AVX512BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
804 ; AVX512BW-FCP-NEXT: vmovdqa %ymm5, (%r8)
805 ; AVX512BW-FCP-NEXT: vmovdqa %ymm4, (%r9)
806 ; AVX512BW-FCP-NEXT: vmovdqa %ymm2, (%rax)
807 ; AVX512BW-FCP-NEXT: vzeroupper
808 ; AVX512BW-FCP-NEXT: retq
810 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf4:
811 ; AVX512DQ-BW: # %bb.0:
812 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
813 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm2
814 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm3
815 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
816 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
817 ; AVX512DQ-BW-NEXT: vpbroadcastq 144(%rdi), %ymm1
818 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
819 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
820 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
821 ; AVX512DQ-BW-NEXT: vmovdqa 128(%rdi), %ymm4
822 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
823 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
824 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
825 ; AVX512DQ-BW-NEXT: vmovdqa 160(%rdi), %xmm6
826 ; AVX512DQ-BW-NEXT: vpbroadcastq %xmm6, %ymm7
827 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
828 ; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
829 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
830 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
831 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
832 ; AVX512DQ-BW-NEXT: vmovdqa 160(%rdi), %ymm7
833 ; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
834 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
835 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
836 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
837 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
838 ; AVX512DQ-BW-NEXT: vpbroadcastq 136(%rdi), %ymm8
839 ; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
840 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
841 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
842 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
843 ; AVX512DQ-BW-NEXT: vmovdqa %ymm0, (%rsi)
844 ; AVX512DQ-BW-NEXT: vmovdqa %ymm1, (%rdx)
845 ; AVX512DQ-BW-NEXT: vmovdqa %ymm5, (%rcx)
846 ; AVX512DQ-BW-NEXT: vmovdqa %ymm6, (%r8)
847 ; AVX512DQ-BW-NEXT: vmovdqa %ymm4, (%r9)
848 ; AVX512DQ-BW-NEXT: vmovdqa %ymm2, (%rax)
849 ; AVX512DQ-BW-NEXT: vzeroupper
850 ; AVX512DQ-BW-NEXT: retq
852 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf4:
853 ; AVX512DQ-BW-FCP: # %bb.0:
854 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
855 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
856 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
857 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
858 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
859 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
860 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
861 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
862 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
863 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
864 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
865 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
866 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
867 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
868 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
869 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
870 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
871 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
872 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
873 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
874 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
875 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
876 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
877 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
878 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
879 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
880 ; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
881 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
882 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
883 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
884 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm0, (%rsi)
885 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm1, (%rdx)
886 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
887 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm5, (%r8)
888 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm4, (%r9)
889 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm2, (%rax)
890 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
891 ; AVX512DQ-BW-FCP-NEXT: retq
892 %wide.vec = load <24 x i64>, ptr %in.vec, align 64
893 %strided.vec0 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 0, i32 6, i32 12, i32 18>
894 %strided.vec1 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 1, i32 7, i32 13, i32 19>
895 %strided.vec2 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 2, i32 8, i32 14, i32 20>
896 %strided.vec3 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 3, i32 9, i32 15, i32 21>
897 %strided.vec4 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 4, i32 10, i32 16, i32 22>
898 %strided.vec5 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 5, i32 11, i32 17, i32 23>
899 store <4 x i64> %strided.vec0, ptr %out.vec0, align 64
900 store <4 x i64> %strided.vec1, ptr %out.vec1, align 64
901 store <4 x i64> %strided.vec2, ptr %out.vec2, align 64
902 store <4 x i64> %strided.vec3, ptr %out.vec3, align 64
903 store <4 x i64> %strided.vec4, ptr %out.vec4, align 64
904 store <4 x i64> %strided.vec5, ptr %out.vec5, align 64
908 define void @load_i64_stride6_vf8(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
909 ; SSE-LABEL: load_i64_stride6_vf8:
911 ; SSE-NEXT: subq $24, %rsp
912 ; SSE-NEXT: movaps 160(%rdi), %xmm10
913 ; SSE-NEXT: movaps 256(%rdi), %xmm13
914 ; SSE-NEXT: movaps 208(%rdi), %xmm5
915 ; SSE-NEXT: movaps 352(%rdi), %xmm15
916 ; SSE-NEXT: movaps 304(%rdi), %xmm6
917 ; SSE-NEXT: movaps 64(%rdi), %xmm0
918 ; SSE-NEXT: movaps (%rdi), %xmm8
919 ; SSE-NEXT: movaps 16(%rdi), %xmm7
920 ; SSE-NEXT: movaps 48(%rdi), %xmm1
921 ; SSE-NEXT: movaps 144(%rdi), %xmm2
922 ; SSE-NEXT: movaps 96(%rdi), %xmm11
923 ; SSE-NEXT: movaps 240(%rdi), %xmm3
924 ; SSE-NEXT: movaps 192(%rdi), %xmm12
925 ; SSE-NEXT: movaps 336(%rdi), %xmm4
926 ; SSE-NEXT: movaps 288(%rdi), %xmm9
927 ; SSE-NEXT: movaps %xmm9, %xmm14
928 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm4[0]
929 ; SSE-NEXT: movaps %xmm14, (%rsp) # 16-byte Spill
930 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm4[1]
931 ; SSE-NEXT: movaps %xmm12, %xmm4
932 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
933 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
934 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm3[1]
935 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
936 ; SSE-NEXT: movaps %xmm11, %xmm3
937 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm2[0]
938 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm2[1]
939 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
940 ; SSE-NEXT: movaps %xmm8, %xmm12
941 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm1[0]
942 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
943 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
944 ; SSE-NEXT: movaps %xmm7, %xmm2
945 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
946 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
947 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
948 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
949 ; SSE-NEXT: movaps %xmm6, %xmm14
950 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm15[0]
951 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm15[1]
952 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
953 ; SSE-NEXT: movaps %xmm5, %xmm15
954 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm13[0]
955 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm13[1]
956 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
957 ; SSE-NEXT: movaps 112(%rdi), %xmm13
958 ; SSE-NEXT: movaps %xmm13, %xmm7
959 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm10[0]
960 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm10[1]
961 ; SSE-NEXT: movaps 80(%rdi), %xmm0
962 ; SSE-NEXT: movaps 32(%rdi), %xmm10
963 ; SSE-NEXT: movaps %xmm10, %xmm8
964 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
965 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
966 ; SSE-NEXT: movaps 368(%rdi), %xmm0
967 ; SSE-NEXT: movaps 320(%rdi), %xmm5
968 ; SSE-NEXT: movaps %xmm5, %xmm6
969 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
970 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
971 ; SSE-NEXT: movaps 272(%rdi), %xmm0
972 ; SSE-NEXT: movaps 224(%rdi), %xmm3
973 ; SSE-NEXT: movaps %xmm3, %xmm4
974 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
975 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
976 ; SSE-NEXT: movaps 176(%rdi), %xmm0
977 ; SSE-NEXT: movaps 128(%rdi), %xmm1
978 ; SSE-NEXT: movaps %xmm1, %xmm2
979 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
980 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
981 ; SSE-NEXT: movaps %xmm11, 16(%rsi)
982 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
983 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
984 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
985 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
986 ; SSE-NEXT: movaps %xmm12, (%rsi)
987 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
988 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
989 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
990 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
991 ; SSE-NEXT: movaps %xmm9, 48(%rdx)
992 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
993 ; SSE-NEXT: movaps %xmm0, (%rdx)
994 ; SSE-NEXT: movaps %xmm7, 16(%rcx)
995 ; SSE-NEXT: movaps %xmm15, 32(%rcx)
996 ; SSE-NEXT: movaps %xmm14, 48(%rcx)
997 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
998 ; SSE-NEXT: movaps %xmm0, (%rcx)
999 ; SSE-NEXT: movaps %xmm13, 16(%r8)
1000 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1001 ; SSE-NEXT: movaps %xmm0, 32(%r8)
1002 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1003 ; SSE-NEXT: movaps %xmm0, 48(%r8)
1004 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1005 ; SSE-NEXT: movaps %xmm0, (%r8)
1006 ; SSE-NEXT: movaps %xmm2, 16(%r9)
1007 ; SSE-NEXT: movaps %xmm4, 32(%r9)
1008 ; SSE-NEXT: movaps %xmm6, 48(%r9)
1009 ; SSE-NEXT: movaps %xmm8, (%r9)
1010 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1011 ; SSE-NEXT: movaps %xmm1, 16(%rax)
1012 ; SSE-NEXT: movaps %xmm3, 32(%rax)
1013 ; SSE-NEXT: movaps %xmm5, 48(%rax)
1014 ; SSE-NEXT: movaps %xmm10, (%rax)
1015 ; SSE-NEXT: addq $24, %rsp
1018 ; AVX-LABEL: load_i64_stride6_vf8:
1020 ; AVX-NEXT: vmovaps 352(%rdi), %ymm0
1021 ; AVX-NEXT: vmovaps 288(%rdi), %ymm4
1022 ; AVX-NEXT: vmovaps 96(%rdi), %ymm7
1023 ; AVX-NEXT: vmovaps 320(%rdi), %ymm5
1024 ; AVX-NEXT: vmovaps 128(%rdi), %ymm3
1025 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm6
1026 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm3[0],ymm6[2],ymm3[2]
1027 ; AVX-NEXT: vmovaps (%rdi), %xmm9
1028 ; AVX-NEXT: vmovaps 16(%rdi), %xmm8
1029 ; AVX-NEXT: vmovaps 48(%rdi), %xmm10
1030 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm10[0]
1031 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
1032 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1033 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm11
1034 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm11[0],ymm5[0],ymm11[2],ymm5[2]
1035 ; AVX-NEXT: vmovaps 240(%rdi), %xmm12
1036 ; AVX-NEXT: vmovaps 192(%rdi), %xmm13
1037 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm13[0],xmm12[0]
1038 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm2[4,5,6,7]
1039 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1040 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm6[1],ymm3[1],ymm6[3],ymm3[3]
1041 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm9[1],xmm10[1]
1042 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm6[0,1,2,3],ymm3[4,5,6,7]
1043 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1044 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm11[1],ymm5[1],ymm11[3],ymm5[3]
1045 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm13[1],xmm12[1]
1046 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3],ymm5[4,5,6,7]
1047 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm10
1048 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1049 ; AVX-NEXT: vmovaps 64(%rdi), %xmm11
1050 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm8[0],xmm11[0]
1051 ; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
1052 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm12
1053 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
1054 ; AVX-NEXT: vmovaps 256(%rdi), %xmm13
1055 ; AVX-NEXT: vmovaps 208(%rdi), %xmm14
1056 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1057 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm15[0,1,2,3],ymm9[4,5,6,7]
1058 ; AVX-NEXT: vmovaps 160(%rdi), %ymm15
1059 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm10[1],ymm7[3],ymm10[3]
1060 ; AVX-NEXT: vmovaps 32(%rdi), %xmm10
1061 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm8[1],xmm11[1]
1062 ; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
1063 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm4[1],ymm12[1],ymm4[3],ymm12[3]
1064 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm13[1]
1065 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
1066 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm8
1067 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm8[0],ymm15[0],ymm8[2],ymm15[2]
1068 ; AVX-NEXT: vmovaps 80(%rdi), %xmm12
1069 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm10[0],xmm12[0]
1070 ; AVX-NEXT: vblendps {{.*#+}} ymm11 = ymm13[0,1,2,3],ymm11[4,5,6,7]
1071 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm13
1072 ; AVX-NEXT: vmovaps %ymm0, %ymm3
1073 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm13[0],ymm0[0],ymm13[2],ymm0[2]
1074 ; AVX-NEXT: vmovaps 272(%rdi), %xmm1
1075 ; AVX-NEXT: vmovaps 224(%rdi), %xmm0
1076 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm1[0]
1077 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
1078 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm15[1],ymm8[3],ymm15[3]
1079 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
1080 ; AVX-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1081 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm13[1],ymm3[1],ymm13[3],ymm3[3]
1082 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
1083 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm10[4,5,6,7]
1084 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1085 ; AVX-NEXT: vmovaps %ymm1, 32(%rsi)
1086 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1087 ; AVX-NEXT: vmovaps %ymm1, (%rsi)
1088 ; AVX-NEXT: vmovaps %ymm5, 32(%rdx)
1089 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1090 ; AVX-NEXT: vmovaps %ymm1, (%rdx)
1091 ; AVX-NEXT: vmovaps %ymm9, 32(%rcx)
1092 ; AVX-NEXT: vmovaps %ymm6, (%rcx)
1093 ; AVX-NEXT: vmovaps %ymm4, 32(%r8)
1094 ; AVX-NEXT: vmovaps %ymm7, (%r8)
1095 ; AVX-NEXT: vmovaps %ymm2, 32(%r9)
1096 ; AVX-NEXT: vmovaps %ymm11, (%r9)
1097 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
1098 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
1099 ; AVX-NEXT: vmovaps %ymm8, (%rax)
1100 ; AVX-NEXT: vzeroupper
1103 ; AVX2-LABEL: load_i64_stride6_vf8:
1105 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm0
1106 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm2
1107 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm7
1108 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm5
1109 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm9
1110 ; AVX2-NEXT: vmovaps (%rdi), %xmm4
1111 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm11
1112 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm6
1113 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1114 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1115 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1116 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1117 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1118 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm8
1119 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm10
1120 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1121 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1122 ; AVX2-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1123 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1124 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm12
1125 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1126 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1127 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1128 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm6
1129 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1130 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1131 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1132 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm8
1133 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1134 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm12
1135 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1136 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1137 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm10
1138 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1139 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm13
1140 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm14
1141 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1142 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1143 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm15
1144 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1145 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
1146 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1147 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1148 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1149 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1150 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1151 ; AVX2-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1152 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1153 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm11
1154 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1155 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1156 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1157 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1158 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm13
1159 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm14
1160 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1161 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1162 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1163 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1164 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm2
1165 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1166 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1167 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1168 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm11
1169 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1170 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1171 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1172 ; AVX2-NEXT: vmovaps %ymm3, 32(%rsi)
1173 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1174 ; AVX2-NEXT: vmovaps %ymm3, (%rsi)
1175 ; AVX2-NEXT: vmovaps %ymm6, 32(%rdx)
1176 ; AVX2-NEXT: vmovaps %ymm4, (%rdx)
1177 ; AVX2-NEXT: vmovaps %ymm10, 32(%rcx)
1178 ; AVX2-NEXT: vmovaps %ymm8, (%rcx)
1179 ; AVX2-NEXT: vmovaps %ymm7, 32(%r8)
1180 ; AVX2-NEXT: vmovaps %ymm9, (%r8)
1181 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
1182 ; AVX2-NEXT: vmovaps %ymm5, (%r9)
1183 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
1184 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
1185 ; AVX2-NEXT: vmovaps %ymm2, (%rax)
1186 ; AVX2-NEXT: vzeroupper
1189 ; AVX2-FP-LABEL: load_i64_stride6_vf8:
1191 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm0
1192 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm2
1193 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm7
1194 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm5
1195 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm9
1196 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm4
1197 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm11
1198 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm6
1199 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1200 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1201 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1202 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1203 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1204 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm8
1205 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm10
1206 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1207 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1208 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1209 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1210 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm12
1211 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1212 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1213 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1214 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm6
1215 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1216 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1217 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1218 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm8
1219 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1220 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm12
1221 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1222 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1223 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm10
1224 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1225 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm13
1226 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm14
1227 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1228 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1229 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm15
1230 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1231 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
1232 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1233 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1234 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1235 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1236 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1237 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1238 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1239 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm11
1240 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1241 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1242 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1243 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1244 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm13
1245 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm14
1246 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1247 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1248 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1249 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1250 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm2
1251 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1252 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1253 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1254 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm11
1255 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1256 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1257 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1258 ; AVX2-FP-NEXT: vmovaps %ymm3, 32(%rsi)
1259 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1260 ; AVX2-FP-NEXT: vmovaps %ymm3, (%rsi)
1261 ; AVX2-FP-NEXT: vmovaps %ymm6, 32(%rdx)
1262 ; AVX2-FP-NEXT: vmovaps %ymm4, (%rdx)
1263 ; AVX2-FP-NEXT: vmovaps %ymm10, 32(%rcx)
1264 ; AVX2-FP-NEXT: vmovaps %ymm8, (%rcx)
1265 ; AVX2-FP-NEXT: vmovaps %ymm7, 32(%r8)
1266 ; AVX2-FP-NEXT: vmovaps %ymm9, (%r8)
1267 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
1268 ; AVX2-FP-NEXT: vmovaps %ymm5, (%r9)
1269 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1270 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
1271 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rax)
1272 ; AVX2-FP-NEXT: vzeroupper
1273 ; AVX2-FP-NEXT: retq
1275 ; AVX2-FCP-LABEL: load_i64_stride6_vf8:
1276 ; AVX2-FCP: # %bb.0:
1277 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm0
1278 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm2
1279 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm7
1280 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm5
1281 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm9
1282 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm4
1283 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm11
1284 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm6
1285 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1286 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1287 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1288 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1289 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1290 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm8
1291 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm10
1292 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1293 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1294 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1295 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1296 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm12
1297 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1298 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1299 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1300 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm6
1301 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1302 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1303 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1304 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm8
1305 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1306 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm12
1307 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1308 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1309 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm10
1310 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1311 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm13
1312 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm14
1313 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1314 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1315 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm15
1316 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1317 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
1318 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1319 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1320 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1321 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1322 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1323 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1324 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1325 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm11
1326 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1327 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1328 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1329 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1330 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm13
1331 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm14
1332 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1333 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1334 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1335 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1336 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm2
1337 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1338 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1339 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1340 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm11
1341 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1342 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1343 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1344 ; AVX2-FCP-NEXT: vmovaps %ymm3, 32(%rsi)
1345 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1346 ; AVX2-FCP-NEXT: vmovaps %ymm3, (%rsi)
1347 ; AVX2-FCP-NEXT: vmovaps %ymm6, 32(%rdx)
1348 ; AVX2-FCP-NEXT: vmovaps %ymm4, (%rdx)
1349 ; AVX2-FCP-NEXT: vmovaps %ymm10, 32(%rcx)
1350 ; AVX2-FCP-NEXT: vmovaps %ymm8, (%rcx)
1351 ; AVX2-FCP-NEXT: vmovaps %ymm7, 32(%r8)
1352 ; AVX2-FCP-NEXT: vmovaps %ymm9, (%r8)
1353 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
1354 ; AVX2-FCP-NEXT: vmovaps %ymm5, (%r9)
1355 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1356 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
1357 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rax)
1358 ; AVX2-FCP-NEXT: vzeroupper
1359 ; AVX2-FCP-NEXT: retq
1361 ; AVX512-LABEL: load_i64_stride6_vf8:
1363 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
1364 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm6
1365 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm7
1366 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
1367 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
1368 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm3
1369 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm4
1370 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1371 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1372 ; AVX512-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1373 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1374 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1375 ; AVX512-NEXT: movb $56, %dil
1376 ; AVX512-NEXT: kmovw %edi, %k1
1377 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1378 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1379 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1380 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8
1381 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1382 ; AVX512-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1383 ; AVX512-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1384 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1385 ; AVX512-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1386 ; AVX512-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1387 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1388 ; AVX512-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1389 ; AVX512-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1390 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1391 ; AVX512-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1392 ; AVX512-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1393 ; AVX512-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1394 ; AVX512-NEXT: movb $-64, %dil
1395 ; AVX512-NEXT: kmovw %edi, %k2
1396 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1397 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1398 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1399 ; AVX512-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1400 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1401 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1402 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1403 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1404 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1405 ; AVX512-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1406 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1407 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1408 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1409 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1410 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1411 ; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1412 ; AVX512-NEXT: movb $24, %dil
1413 ; AVX512-NEXT: kmovw %edi, %k2
1414 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1415 ; AVX512-NEXT: movb $-32, %dil
1416 ; AVX512-NEXT: kmovw %edi, %k1
1417 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1418 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1419 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1420 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1421 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1422 ; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1423 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1424 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1425 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1426 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1427 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1428 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1429 ; AVX512-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1430 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1431 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1432 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1433 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1434 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1435 ; AVX512-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1436 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1437 ; AVX512-NEXT: vmovdqa64 %zmm2, (%rsi)
1438 ; AVX512-NEXT: vmovdqa64 %zmm13, (%rdx)
1439 ; AVX512-NEXT: vmovdqa64 %zmm8, (%rcx)
1440 ; AVX512-NEXT: vmovdqa64 %zmm9, (%r8)
1441 ; AVX512-NEXT: vmovdqa64 %zmm5, (%r9)
1442 ; AVX512-NEXT: vmovdqa64 %zmm0, (%rax)
1443 ; AVX512-NEXT: vzeroupper
1446 ; AVX512-FCP-LABEL: load_i64_stride6_vf8:
1447 ; AVX512-FCP: # %bb.0:
1448 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1449 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1450 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1451 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1452 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1453 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1454 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1455 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1456 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1457 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1458 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1459 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1460 ; AVX512-FCP-NEXT: movb $56, %dil
1461 ; AVX512-FCP-NEXT: kmovw %edi, %k1
1462 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1463 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1464 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1465 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1466 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1467 ; AVX512-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1468 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1469 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1470 ; AVX512-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1471 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1472 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1473 ; AVX512-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1474 ; AVX512-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1475 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1476 ; AVX512-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1477 ; AVX512-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1478 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1479 ; AVX512-FCP-NEXT: movb $-64, %dil
1480 ; AVX512-FCP-NEXT: kmovw %edi, %k2
1481 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1482 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1483 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1484 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1485 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1486 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1487 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1488 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1489 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1490 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1491 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1492 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1493 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1494 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1495 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1496 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1497 ; AVX512-FCP-NEXT: movb $24, %dil
1498 ; AVX512-FCP-NEXT: kmovw %edi, %k2
1499 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1500 ; AVX512-FCP-NEXT: movb $-32, %dil
1501 ; AVX512-FCP-NEXT: kmovw %edi, %k1
1502 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1503 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1504 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1505 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1506 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1507 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1508 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1509 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1510 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1511 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1512 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1513 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1514 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1515 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1516 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1517 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1518 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1519 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1520 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1521 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1522 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1523 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1524 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1525 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1526 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1527 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1528 ; AVX512-FCP-NEXT: vzeroupper
1529 ; AVX512-FCP-NEXT: retq
1531 ; AVX512DQ-LABEL: load_i64_stride6_vf8:
1532 ; AVX512DQ: # %bb.0:
1533 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
1534 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm6
1535 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm7
1536 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm0
1537 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
1538 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm3
1539 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm4
1540 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1541 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1542 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1543 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1544 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1545 ; AVX512DQ-NEXT: movb $56, %dil
1546 ; AVX512DQ-NEXT: kmovw %edi, %k1
1547 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1548 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1549 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1550 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8
1551 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1552 ; AVX512DQ-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1553 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1554 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1555 ; AVX512DQ-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1556 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1557 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1558 ; AVX512DQ-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1559 ; AVX512DQ-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1560 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1561 ; AVX512DQ-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1562 ; AVX512DQ-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1563 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1564 ; AVX512DQ-NEXT: movb $-64, %dil
1565 ; AVX512DQ-NEXT: kmovw %edi, %k2
1566 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1567 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1568 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1569 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1570 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1571 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1572 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1573 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1574 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1575 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1576 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1577 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1578 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1579 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1580 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1581 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1582 ; AVX512DQ-NEXT: movb $24, %dil
1583 ; AVX512DQ-NEXT: kmovw %edi, %k2
1584 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1585 ; AVX512DQ-NEXT: movb $-32, %dil
1586 ; AVX512DQ-NEXT: kmovw %edi, %k1
1587 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1588 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1589 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1590 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1591 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1592 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1593 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1594 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1595 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1596 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1597 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1598 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1599 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1600 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1601 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1602 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1603 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1604 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1605 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1606 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1607 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, (%rsi)
1608 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, (%rdx)
1609 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, (%rcx)
1610 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%r8)
1611 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%r9)
1612 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rax)
1613 ; AVX512DQ-NEXT: vzeroupper
1614 ; AVX512DQ-NEXT: retq
1616 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf8:
1617 ; AVX512DQ-FCP: # %bb.0:
1618 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1619 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1620 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1621 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1622 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1623 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1624 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1625 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1626 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1627 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1628 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1629 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1630 ; AVX512DQ-FCP-NEXT: movb $56, %dil
1631 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
1632 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1633 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1634 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1635 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1636 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1637 ; AVX512DQ-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1638 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1639 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1640 ; AVX512DQ-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1641 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1642 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1643 ; AVX512DQ-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1644 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1645 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1646 ; AVX512DQ-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1647 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1648 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1649 ; AVX512DQ-FCP-NEXT: movb $-64, %dil
1650 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
1651 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1652 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1653 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1654 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1655 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1656 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1657 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1658 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1659 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1660 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1661 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1662 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1663 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1664 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1665 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1666 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1667 ; AVX512DQ-FCP-NEXT: movb $24, %dil
1668 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
1669 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1670 ; AVX512DQ-FCP-NEXT: movb $-32, %dil
1671 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
1672 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1673 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1674 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1675 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1676 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1677 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1678 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1679 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1680 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1681 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1682 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1683 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1684 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1685 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1686 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1687 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1688 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1689 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1690 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1691 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1692 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1693 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1694 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1695 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1696 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1697 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1698 ; AVX512DQ-FCP-NEXT: vzeroupper
1699 ; AVX512DQ-FCP-NEXT: retq
1701 ; AVX512BW-LABEL: load_i64_stride6_vf8:
1702 ; AVX512BW: # %bb.0:
1703 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1704 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm6
1705 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm7
1706 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1707 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1708 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm3
1709 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm4
1710 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1711 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1712 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1713 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1714 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1715 ; AVX512BW-NEXT: movb $56, %dil
1716 ; AVX512BW-NEXT: kmovd %edi, %k1
1717 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1718 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1719 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1720 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
1721 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1722 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1723 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1724 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1725 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1726 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1727 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1728 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1729 ; AVX512BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1730 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1731 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1732 ; AVX512BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1733 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1734 ; AVX512BW-NEXT: movb $-64, %dil
1735 ; AVX512BW-NEXT: kmovd %edi, %k2
1736 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1737 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1738 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1739 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1740 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1741 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1742 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1743 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1744 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1745 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1746 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1747 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1748 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1749 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1750 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1751 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1752 ; AVX512BW-NEXT: movb $24, %dil
1753 ; AVX512BW-NEXT: kmovd %edi, %k2
1754 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1755 ; AVX512BW-NEXT: movb $-32, %dil
1756 ; AVX512BW-NEXT: kmovd %edi, %k1
1757 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1758 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1759 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1760 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1761 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1762 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1763 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1764 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1765 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1766 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1767 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1768 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1769 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1770 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1771 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1772 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1773 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1774 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1775 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1776 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1777 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%rsi)
1778 ; AVX512BW-NEXT: vmovdqa64 %zmm13, (%rdx)
1779 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%rcx)
1780 ; AVX512BW-NEXT: vmovdqa64 %zmm9, (%r8)
1781 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r9)
1782 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rax)
1783 ; AVX512BW-NEXT: vzeroupper
1784 ; AVX512BW-NEXT: retq
1786 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf8:
1787 ; AVX512BW-FCP: # %bb.0:
1788 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1789 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1790 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1791 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1792 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1793 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1794 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1795 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1796 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1797 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1798 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1799 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1800 ; AVX512BW-FCP-NEXT: movb $56, %dil
1801 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
1802 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1803 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1804 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1805 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1806 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1807 ; AVX512BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1808 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1809 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1810 ; AVX512BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1811 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1812 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1813 ; AVX512BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1814 ; AVX512BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1815 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1816 ; AVX512BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1817 ; AVX512BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1818 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1819 ; AVX512BW-FCP-NEXT: movb $-64, %dil
1820 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
1821 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1822 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1823 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1824 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1825 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1826 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1827 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1828 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1829 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1830 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1831 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1832 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1833 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1834 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1835 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1836 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1837 ; AVX512BW-FCP-NEXT: movb $24, %dil
1838 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
1839 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1840 ; AVX512BW-FCP-NEXT: movb $-32, %dil
1841 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
1842 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1843 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1844 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1845 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1846 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1847 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1848 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1849 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1850 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1851 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1852 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1853 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1854 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1855 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1856 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1857 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1858 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1859 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1860 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1861 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1862 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1863 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1864 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1865 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1866 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1867 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1868 ; AVX512BW-FCP-NEXT: vzeroupper
1869 ; AVX512BW-FCP-NEXT: retq
1871 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf8:
1872 ; AVX512DQ-BW: # %bb.0:
1873 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1874 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm6
1875 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm7
1876 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
1877 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1878 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm3
1879 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm4
1880 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1881 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1882 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1883 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1884 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1885 ; AVX512DQ-BW-NEXT: movb $56, %dil
1886 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
1887 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1888 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1889 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1890 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8
1891 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1892 ; AVX512DQ-BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1893 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1894 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1895 ; AVX512DQ-BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1896 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1897 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1898 ; AVX512DQ-BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1899 ; AVX512DQ-BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1900 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1901 ; AVX512DQ-BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1902 ; AVX512DQ-BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1903 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1904 ; AVX512DQ-BW-NEXT: movb $-64, %dil
1905 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
1906 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1907 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1908 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1909 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1910 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1911 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1912 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1913 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1914 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1915 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1916 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1917 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1918 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1919 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1920 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1921 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1922 ; AVX512DQ-BW-NEXT: movb $24, %dil
1923 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
1924 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1925 ; AVX512DQ-BW-NEXT: movb $-32, %dil
1926 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
1927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1928 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1929 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1930 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1931 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1932 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1933 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1934 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1935 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1936 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1937 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1938 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1939 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1940 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1941 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1942 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1943 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1944 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1945 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1946 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1947 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%rsi)
1948 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, (%rdx)
1949 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, (%rcx)
1950 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, (%r8)
1951 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%r9)
1952 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, (%rax)
1953 ; AVX512DQ-BW-NEXT: vzeroupper
1954 ; AVX512DQ-BW-NEXT: retq
1956 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf8:
1957 ; AVX512DQ-BW-FCP: # %bb.0:
1958 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1959 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1960 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1961 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1962 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1963 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1964 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1965 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1966 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1967 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1968 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1969 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1970 ; AVX512DQ-BW-FCP-NEXT: movb $56, %dil
1971 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
1972 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1973 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1974 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1975 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1976 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1977 ; AVX512DQ-BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1978 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1979 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1980 ; AVX512DQ-BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1981 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1982 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1983 ; AVX512DQ-BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1984 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1985 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1986 ; AVX512DQ-BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1987 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1988 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1989 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %dil
1990 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
1991 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1992 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1993 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1994 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1995 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1996 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1997 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1998 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1999 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2000 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
2001 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
2002 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
2003 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2004 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2005 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
2006 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
2007 ; AVX512DQ-BW-FCP-NEXT: movb $24, %dil
2008 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
2009 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
2010 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %dil
2011 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
2012 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
2013 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
2014 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2015 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2016 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
2017 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
2018 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
2019 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
2020 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
2021 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2022 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2023 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
2024 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
2025 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
2026 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
2027 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2028 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2029 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
2030 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
2031 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
2032 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
2033 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
2034 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
2035 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
2036 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
2037 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
2038 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
2039 ; AVX512DQ-BW-FCP-NEXT: retq
2040 %wide.vec = load <48 x i64>, ptr %in.vec, align 64
2041 %strided.vec0 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42>
2042 %strided.vec1 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43>
2043 %strided.vec2 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44>
2044 %strided.vec3 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45>
2045 %strided.vec4 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46>
2046 %strided.vec5 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47>
2047 store <8 x i64> %strided.vec0, ptr %out.vec0, align 64
2048 store <8 x i64> %strided.vec1, ptr %out.vec1, align 64
2049 store <8 x i64> %strided.vec2, ptr %out.vec2, align 64
2050 store <8 x i64> %strided.vec3, ptr %out.vec3, align 64
2051 store <8 x i64> %strided.vec4, ptr %out.vec4, align 64
2052 store <8 x i64> %strided.vec5, ptr %out.vec5, align 64
2056 define void @load_i64_stride6_vf16(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
2057 ; SSE-LABEL: load_i64_stride6_vf16:
2059 ; SSE-NEXT: subq $408, %rsp # imm = 0x198
2060 ; SSE-NEXT: movaps (%rdi), %xmm7
2061 ; SSE-NEXT: movaps 624(%rdi), %xmm0
2062 ; SSE-NEXT: movaps 576(%rdi), %xmm8
2063 ; SSE-NEXT: movaps 240(%rdi), %xmm1
2064 ; SSE-NEXT: movaps 192(%rdi), %xmm9
2065 ; SSE-NEXT: movaps 720(%rdi), %xmm2
2066 ; SSE-NEXT: movaps 672(%rdi), %xmm10
2067 ; SSE-NEXT: movaps 336(%rdi), %xmm3
2068 ; SSE-NEXT: movaps 288(%rdi), %xmm11
2069 ; SSE-NEXT: movaps 432(%rdi), %xmm4
2070 ; SSE-NEXT: movaps 384(%rdi), %xmm13
2071 ; SSE-NEXT: movaps 528(%rdi), %xmm5
2072 ; SSE-NEXT: movaps 480(%rdi), %xmm12
2073 ; SSE-NEXT: movaps 144(%rdi), %xmm6
2074 ; SSE-NEXT: movaps 96(%rdi), %xmm14
2075 ; SSE-NEXT: movaps %xmm14, %xmm15
2076 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm6[0]
2077 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2078 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm6[1]
2079 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2080 ; SSE-NEXT: movaps %xmm12, %xmm6
2081 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
2082 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2083 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm5[1]
2084 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2085 ; SSE-NEXT: movaps %xmm13, %xmm5
2086 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
2087 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2088 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm4[1]
2089 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2090 ; SSE-NEXT: movaps %xmm11, %xmm4
2091 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
2092 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2093 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
2094 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2095 ; SSE-NEXT: movaps %xmm10, %xmm3
2096 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
2097 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2098 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm2[1]
2099 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2100 ; SSE-NEXT: movaps %xmm9, %xmm2
2101 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
2102 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2103 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm1[1]
2104 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2105 ; SSE-NEXT: movaps %xmm8, %xmm1
2106 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2107 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2108 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
2109 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2110 ; SSE-NEXT: movaps 48(%rdi), %xmm0
2111 ; SSE-NEXT: movaps %xmm7, %xmm1
2112 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2113 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2114 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
2115 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2116 ; SSE-NEXT: movaps 64(%rdi), %xmm0
2117 ; SSE-NEXT: movaps 16(%rdi), %xmm1
2118 ; SSE-NEXT: movaps %xmm1, %xmm2
2119 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2120 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2121 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2122 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2123 ; SSE-NEXT: movaps 160(%rdi), %xmm0
2124 ; SSE-NEXT: movaps 112(%rdi), %xmm1
2125 ; SSE-NEXT: movaps %xmm1, %xmm2
2126 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2127 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2128 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2129 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2130 ; SSE-NEXT: movaps 256(%rdi), %xmm0
2131 ; SSE-NEXT: movaps 208(%rdi), %xmm1
2132 ; SSE-NEXT: movaps %xmm1, %xmm2
2133 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2134 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2135 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2136 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2137 ; SSE-NEXT: movaps 352(%rdi), %xmm0
2138 ; SSE-NEXT: movaps 304(%rdi), %xmm1
2139 ; SSE-NEXT: movaps %xmm1, %xmm2
2140 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2141 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2142 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2143 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2144 ; SSE-NEXT: movaps 448(%rdi), %xmm0
2145 ; SSE-NEXT: movaps 400(%rdi), %xmm1
2146 ; SSE-NEXT: movaps %xmm1, %xmm2
2147 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2148 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2149 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2150 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
2151 ; SSE-NEXT: movaps 544(%rdi), %xmm0
2152 ; SSE-NEXT: movaps 496(%rdi), %xmm1
2153 ; SSE-NEXT: movaps %xmm1, %xmm2
2154 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2155 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2156 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2157 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2158 ; SSE-NEXT: movaps 640(%rdi), %xmm0
2159 ; SSE-NEXT: movaps 592(%rdi), %xmm15
2160 ; SSE-NEXT: movaps %xmm15, %xmm1
2161 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2162 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2163 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
2164 ; SSE-NEXT: movaps 736(%rdi), %xmm0
2165 ; SSE-NEXT: movaps 688(%rdi), %xmm11
2166 ; SSE-NEXT: movaps %xmm11, %xmm1
2167 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2168 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2169 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
2170 ; SSE-NEXT: movaps 80(%rdi), %xmm0
2171 ; SSE-NEXT: movaps 32(%rdi), %xmm1
2172 ; SSE-NEXT: movaps %xmm1, %xmm2
2173 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2174 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2175 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2176 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2177 ; SSE-NEXT: movaps 176(%rdi), %xmm0
2178 ; SSE-NEXT: movaps 128(%rdi), %xmm13
2179 ; SSE-NEXT: movaps %xmm13, %xmm1
2180 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2181 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2182 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
2183 ; SSE-NEXT: movaps 272(%rdi), %xmm0
2184 ; SSE-NEXT: movaps 224(%rdi), %xmm12
2185 ; SSE-NEXT: movaps %xmm12, %xmm14
2186 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
2187 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
2188 ; SSE-NEXT: movaps 368(%rdi), %xmm0
2189 ; SSE-NEXT: movaps 320(%rdi), %xmm9
2190 ; SSE-NEXT: movaps %xmm9, %xmm10
2191 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
2192 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
2193 ; SSE-NEXT: movaps 464(%rdi), %xmm0
2194 ; SSE-NEXT: movaps 416(%rdi), %xmm5
2195 ; SSE-NEXT: movaps %xmm5, %xmm8
2196 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
2197 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
2198 ; SSE-NEXT: movaps 560(%rdi), %xmm0
2199 ; SSE-NEXT: movaps 512(%rdi), %xmm6
2200 ; SSE-NEXT: movaps %xmm6, %xmm7
2201 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm0[0]
2202 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm0[1]
2203 ; SSE-NEXT: movaps 656(%rdi), %xmm0
2204 ; SSE-NEXT: movaps 608(%rdi), %xmm3
2205 ; SSE-NEXT: movaps %xmm3, %xmm4
2206 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
2207 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
2208 ; SSE-NEXT: movaps 752(%rdi), %xmm0
2209 ; SSE-NEXT: movaps 704(%rdi), %xmm1
2210 ; SSE-NEXT: movaps %xmm1, %xmm2
2211 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2212 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2214 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
2215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2216 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
2217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2218 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
2219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2220 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
2221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2222 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
2223 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2224 ; SSE-NEXT: movaps %xmm0, (%rsi)
2225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2226 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
2227 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2228 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
2229 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2230 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
2231 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2232 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
2233 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2234 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
2235 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2236 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
2237 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2238 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
2239 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2240 ; SSE-NEXT: movaps %xmm0, (%rdx)
2241 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2242 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
2243 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2244 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
2245 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2246 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
2247 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2248 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
2249 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2250 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
2251 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2252 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
2253 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2254 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
2255 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2256 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
2257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2258 ; SSE-NEXT: movaps %xmm0, (%rcx)
2259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2260 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
2261 ; SSE-NEXT: movaps %xmm11, 112(%r8)
2262 ; SSE-NEXT: movaps %xmm15, 96(%r8)
2263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2264 ; SSE-NEXT: movaps %xmm0, 80(%r8)
2265 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2266 ; SSE-NEXT: movaps %xmm0, 64(%r8)
2267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2268 ; SSE-NEXT: movaps %xmm0, 48(%r8)
2269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2270 ; SSE-NEXT: movaps %xmm0, 32(%r8)
2271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2272 ; SSE-NEXT: movaps %xmm0, 16(%r8)
2273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2274 ; SSE-NEXT: movaps %xmm0, (%r8)
2275 ; SSE-NEXT: movaps %xmm2, 112(%r9)
2276 ; SSE-NEXT: movaps %xmm4, 96(%r9)
2277 ; SSE-NEXT: movaps %xmm7, 80(%r9)
2278 ; SSE-NEXT: movaps %xmm8, 64(%r9)
2279 ; SSE-NEXT: movaps %xmm10, 48(%r9)
2280 ; SSE-NEXT: movaps %xmm14, 32(%r9)
2281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2282 ; SSE-NEXT: movaps %xmm0, 16(%r9)
2283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2284 ; SSE-NEXT: movaps %xmm0, (%r9)
2285 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2286 ; SSE-NEXT: movaps %xmm1, 112(%rax)
2287 ; SSE-NEXT: movaps %xmm3, 96(%rax)
2288 ; SSE-NEXT: movaps %xmm6, 80(%rax)
2289 ; SSE-NEXT: movaps %xmm5, 64(%rax)
2290 ; SSE-NEXT: movaps %xmm9, 48(%rax)
2291 ; SSE-NEXT: movaps %xmm12, 32(%rax)
2292 ; SSE-NEXT: movaps %xmm13, 16(%rax)
2293 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2294 ; SSE-NEXT: movaps %xmm0, (%rax)
2295 ; SSE-NEXT: addq $408, %rsp # imm = 0x198
2298 ; AVX-LABEL: load_i64_stride6_vf16:
2300 ; AVX-NEXT: subq $552, %rsp # imm = 0x228
2301 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
2302 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2303 ; AVX-NEXT: vmovaps 512(%rdi), %ymm8
2304 ; AVX-NEXT: vmovups %ymm8, (%rsp) # 32-byte Spill
2305 ; AVX-NEXT: vmovaps 128(%rdi), %ymm2
2306 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm3
2307 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
2308 ; AVX-NEXT: vmovaps (%rdi), %xmm5
2309 ; AVX-NEXT: vmovaps 48(%rdi), %xmm6
2310 ; AVX-NEXT: vmovlhps {{.*#+}} xmm7 = xmm5[0],xmm6[0]
2311 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm4[4,5,6,7]
2312 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2313 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm4
2314 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm4[0],ymm8[0],ymm4[2],ymm8[2]
2315 ; AVX-NEXT: vmovaps 432(%rdi), %xmm8
2316 ; AVX-NEXT: vmovaps 384(%rdi), %xmm9
2317 ; AVX-NEXT: vmovlhps {{.*#+}} xmm10 = xmm9[0],xmm8[0]
2318 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm7[4,5,6,7]
2319 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2320 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm7
2321 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
2322 ; AVX-NEXT: vmovaps 240(%rdi), %xmm11
2323 ; AVX-NEXT: vmovaps 192(%rdi), %xmm12
2324 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
2325 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm10[4,5,6,7]
2326 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2327 ; AVX-NEXT: vmovaps 704(%rdi), %ymm10
2328 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm13
2329 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm13[0],ymm10[0],ymm13[2],ymm10[2]
2330 ; AVX-NEXT: vmovaps 624(%rdi), %xmm15
2331 ; AVX-NEXT: vmovaps 576(%rdi), %xmm0
2332 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm15[0]
2333 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
2334 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2335 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
2336 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm6[1]
2337 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2338 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2339 ; AVX-NEXT: vunpckhpd (%rsp), %ymm4, %ymm1 # 32-byte Folded Reload
2340 ; AVX-NEXT: # ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
2341 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
2342 ; AVX-NEXT: vmovaps 480(%rdi), %ymm3
2343 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2344 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2345 ; AVX-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
2346 ; AVX-NEXT: vmovaps 96(%rdi), %ymm2
2347 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2348 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm4 # 32-byte Folded Reload
2349 ; AVX-NEXT: # ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
2350 ; AVX-NEXT: vmovaps 16(%rdi), %xmm6
2351 ; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2352 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm12[1],xmm11[1]
2353 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2354 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2355 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm13[1],ymm10[1],ymm13[3],ymm10[3]
2356 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
2357 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2358 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2359 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm14
2360 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm14[0],ymm2[2],ymm14[2]
2361 ; AVX-NEXT: vmovaps 64(%rdi), %xmm13
2362 ; AVX-NEXT: vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm13[0]
2363 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm4[4,5,6,7]
2364 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2365 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm11
2366 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm3[0],ymm11[0],ymm3[2],ymm11[2]
2367 ; AVX-NEXT: vmovaps 448(%rdi), %xmm7
2368 ; AVX-NEXT: vmovaps 400(%rdi), %xmm8
2369 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm8[0],xmm7[0]
2370 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm6[4,5,6,7]
2371 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2372 ; AVX-NEXT: vmovaps 288(%rdi), %ymm6
2373 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm15
2374 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm6[0],ymm15[0],ymm6[2],ymm15[2]
2375 ; AVX-NEXT: vmovaps 256(%rdi), %xmm4
2376 ; AVX-NEXT: vmovaps 208(%rdi), %xmm3
2377 ; AVX-NEXT: vmovlhps {{.*#+}} xmm12 = xmm3[0],xmm4[0]
2378 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm10[4,5,6,7]
2379 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2380 ; AVX-NEXT: vmovaps 672(%rdi), %ymm5
2381 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm2
2382 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm5[0],ymm2[0],ymm5[2],ymm2[2]
2383 ; AVX-NEXT: vmovaps 640(%rdi), %xmm1
2384 ; AVX-NEXT: vmovaps 592(%rdi), %xmm0
2385 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm0[0],xmm1[0]
2386 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm12[4,5,6,7]
2387 ; AVX-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2388 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2389 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm14[1],ymm9[3],ymm14[3]
2390 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2391 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm10[1],xmm13[1]
2392 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm13[0,1,2,3],ymm9[4,5,6,7]
2393 ; AVX-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2394 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2395 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm9[1],ymm11[1],ymm9[3],ymm11[3]
2396 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm8[1],xmm7[1]
2397 ; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm11[4,5,6,7]
2398 ; AVX-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2399 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
2400 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
2401 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2402 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2403 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm6[1],ymm15[1],ymm6[3],ymm15[3]
2404 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm4[1]
2405 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2406 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2407 ; AVX-NEXT: vmovaps 544(%rdi), %ymm0
2408 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2409 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm1
2410 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2411 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
2412 ; AVX-NEXT: vmovaps 464(%rdi), %xmm14
2413 ; AVX-NEXT: vmovaps 416(%rdi), %xmm13
2414 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm13[0],xmm14[0]
2415 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2416 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2417 ; AVX-NEXT: vmovaps 160(%rdi), %ymm11
2418 ; AVX-NEXT: vmovaps 32(%rdi), %xmm12
2419 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm10
2420 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
2421 ; AVX-NEXT: vmovaps 80(%rdi), %xmm9
2422 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm12[0],xmm9[0]
2423 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2424 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2425 ; AVX-NEXT: vmovaps 352(%rdi), %ymm8
2426 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm7
2427 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
2428 ; AVX-NEXT: vmovaps 272(%rdi), %xmm6
2429 ; AVX-NEXT: vmovaps 224(%rdi), %xmm3
2430 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm3[0],xmm6[0]
2431 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2432 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2433 ; AVX-NEXT: vmovaps 736(%rdi), %ymm5
2434 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm2
2435 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm5[0],ymm2[2],ymm5[2]
2436 ; AVX-NEXT: vmovaps 656(%rdi), %xmm1
2437 ; AVX-NEXT: vmovaps 608(%rdi), %xmm0
2438 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm0[0],xmm1[0]
2439 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm15[0,1,2,3],ymm4[4,5,6,7]
2440 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2441 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
2442 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2443 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2444 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
2445 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
2446 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm13[1],xmm14[1]
2447 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2448 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
2449 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm12[1],xmm9[1]
2450 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2451 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
2452 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
2453 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
2454 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2455 ; AVX-NEXT: vmovaps %ymm3, 96(%rsi)
2456 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2457 ; AVX-NEXT: vmovaps %ymm3, 32(%rsi)
2458 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2459 ; AVX-NEXT: vmovaps %ymm3, 64(%rsi)
2460 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2461 ; AVX-NEXT: vmovaps %ymm3, (%rsi)
2462 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2463 ; AVX-NEXT: vmovaps %ymm3, 96(%rdx)
2464 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2465 ; AVX-NEXT: vmovaps %ymm3, 32(%rdx)
2466 ; AVX-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
2467 ; AVX-NEXT: vmovaps %ymm3, 64(%rdx)
2468 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2469 ; AVX-NEXT: vmovaps %ymm3, (%rdx)
2470 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2471 ; AVX-NEXT: vmovaps %ymm3, 96(%rcx)
2472 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2473 ; AVX-NEXT: vmovaps %ymm3, 32(%rcx)
2474 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2475 ; AVX-NEXT: vmovaps %ymm3, 64(%rcx)
2476 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2477 ; AVX-NEXT: vmovaps %ymm3, (%rcx)
2478 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2479 ; AVX-NEXT: vmovaps %ymm3, 32(%r8)
2480 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2481 ; AVX-NEXT: vmovaps %ymm3, 96(%r8)
2482 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2483 ; AVX-NEXT: vmovaps %ymm3, 64(%r8)
2484 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2485 ; AVX-NEXT: vmovaps %ymm3, (%r8)
2486 ; AVX-NEXT: vmovaps %ymm4, 96(%r9)
2487 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2488 ; AVX-NEXT: vmovaps %ymm3, 32(%r9)
2489 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2490 ; AVX-NEXT: vmovaps %ymm3, (%r9)
2491 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2492 ; AVX-NEXT: vmovaps %ymm3, 64(%r9)
2493 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
2494 ; AVX-NEXT: vmovaps %ymm2, 32(%rax)
2495 ; AVX-NEXT: vmovaps %ymm1, (%rax)
2496 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
2497 ; AVX-NEXT: vmovaps %ymm5, 96(%rax)
2498 ; AVX-NEXT: addq $552, %rsp # imm = 0x228
2499 ; AVX-NEXT: vzeroupper
2502 ; AVX2-LABEL: load_i64_stride6_vf16:
2504 ; AVX2-NEXT: subq $488, %rsp # imm = 0x1E8
2505 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm10
2506 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm12
2507 ; AVX2-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2508 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm7
2509 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm4
2510 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2511 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm15
2512 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm3
2513 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2514 ; AVX2-NEXT: vmovaps (%rdi), %xmm1
2515 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm5
2516 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2517 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm15[0],ymm3[2],ymm15[2]
2518 ; AVX2-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2519 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2520 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2521 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2522 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm3
2523 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm6
2524 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2525 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2526 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2527 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2528 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2529 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2530 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm11
2531 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm9
2532 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2533 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2534 ; AVX2-NEXT: vmovaps %ymm10, %ymm4
2535 ; AVX2-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2536 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2537 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2538 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2539 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm10
2540 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm8
2541 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm10[0],ymm8[2],ymm10[2]
2542 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2543 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm14
2544 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
2545 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2546 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2547 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2548 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2549 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm5
2550 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2551 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2552 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2553 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2554 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm3
2555 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2556 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2557 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2558 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2559 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm3
2560 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2561 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2562 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2563 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2564 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm1
2565 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2566 ; AVX2-NEXT: vmovaps %ymm10, %ymm14
2567 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2568 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2569 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
2570 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2571 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
2572 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm7
2573 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm3
2574 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
2575 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2576 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2577 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm0
2578 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
2579 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
2580 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm4
2581 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm5
2582 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
2583 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2584 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2585 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
2586 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2587 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
2588 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm12
2589 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm2
2590 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
2591 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2592 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2593 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
2594 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2595 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm13
2596 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm1
2597 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
2598 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2599 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2600 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
2601 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm15
2602 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
2603 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2604 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2605 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2606 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
2607 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm10
2608 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
2609 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2610 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2611 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2612 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
2613 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm4
2614 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
2615 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2616 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2617 ; AVX2-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
2618 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
2619 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm5
2620 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
2621 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
2622 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2623 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2624 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2625 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
2626 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
2627 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm7
2628 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm6
2629 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
2630 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2631 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2632 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2633 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
2634 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
2635 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm3
2636 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm2
2637 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
2638 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2639 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2640 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
2641 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2642 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm12
2643 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
2644 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
2645 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2646 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
2647 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2648 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm9
2649 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm0
2650 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
2651 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
2652 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm14
2653 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
2654 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
2655 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2656 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
2657 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
2658 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
2659 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2660 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
2661 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
2662 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
2663 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2664 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm2
2665 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2666 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
2667 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2668 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2669 ; AVX2-NEXT: vmovaps %ymm2, 96(%rsi)
2670 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2671 ; AVX2-NEXT: vmovaps %ymm2, 32(%rsi)
2672 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2673 ; AVX2-NEXT: vmovaps %ymm2, 64(%rsi)
2674 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2675 ; AVX2-NEXT: vmovaps %ymm2, (%rsi)
2676 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2677 ; AVX2-NEXT: vmovaps %ymm2, 96(%rdx)
2678 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2679 ; AVX2-NEXT: vmovaps %ymm2, 32(%rdx)
2680 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2681 ; AVX2-NEXT: vmovaps %ymm2, 64(%rdx)
2682 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2683 ; AVX2-NEXT: vmovaps %ymm2, (%rdx)
2684 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2685 ; AVX2-NEXT: vmovaps %ymm2, 96(%rcx)
2686 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2687 ; AVX2-NEXT: vmovaps %ymm2, 32(%rcx)
2688 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2689 ; AVX2-NEXT: vmovaps %ymm2, 64(%rcx)
2690 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2691 ; AVX2-NEXT: vmovaps %ymm2, (%rcx)
2692 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2693 ; AVX2-NEXT: vmovaps %ymm2, 32(%r8)
2694 ; AVX2-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
2695 ; AVX2-NEXT: vmovaps %ymm2, 96(%r8)
2696 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2697 ; AVX2-NEXT: vmovaps %ymm2, 64(%r8)
2698 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2699 ; AVX2-NEXT: vmovaps %ymm2, (%r8)
2700 ; AVX2-NEXT: vmovaps %ymm8, 96(%r9)
2701 ; AVX2-NEXT: vmovaps %ymm11, 32(%r9)
2702 ; AVX2-NEXT: vmovaps %ymm13, (%r9)
2703 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2704 ; AVX2-NEXT: vmovaps %ymm2, 64(%r9)
2705 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
2706 ; AVX2-NEXT: vmovaps %ymm1, 32(%rax)
2707 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
2708 ; AVX2-NEXT: vmovaps %ymm4, 64(%rax)
2709 ; AVX2-NEXT: vmovaps %ymm9, 96(%rax)
2710 ; AVX2-NEXT: addq $488, %rsp # imm = 0x1E8
2711 ; AVX2-NEXT: vzeroupper
2714 ; AVX2-FP-LABEL: load_i64_stride6_vf16:
2716 ; AVX2-FP-NEXT: subq $488, %rsp # imm = 0x1E8
2717 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm10
2718 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm12
2719 ; AVX2-FP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2720 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm7
2721 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm4
2722 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2723 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm15
2724 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm3
2725 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2726 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm1
2727 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm5
2728 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2729 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm15[0],ymm3[2],ymm15[2]
2730 ; AVX2-FP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2731 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2732 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2733 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2734 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm3
2735 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm6
2736 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2737 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2738 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2739 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2740 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2741 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2742 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm11
2743 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm9
2744 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2745 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2746 ; AVX2-FP-NEXT: vmovaps %ymm10, %ymm4
2747 ; AVX2-FP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2748 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2749 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2750 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2751 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm10
2752 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm8
2753 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm10[0],ymm8[2],ymm10[2]
2754 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2755 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm14
2756 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
2757 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2758 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2759 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2760 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2761 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm5
2762 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2763 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2764 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2765 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2766 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm3
2767 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2768 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2769 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2770 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2771 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm3
2772 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2773 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2774 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2775 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2776 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm1
2777 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2778 ; AVX2-FP-NEXT: vmovaps %ymm10, %ymm14
2779 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2780 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2781 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
2782 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2783 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
2784 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm7
2785 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm3
2786 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
2787 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2788 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2789 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm0
2790 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
2791 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
2792 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm4
2793 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm5
2794 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
2795 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2796 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2797 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
2798 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2799 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
2800 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm12
2801 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm2
2802 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
2803 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2804 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2805 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
2806 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2807 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm13
2808 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm1
2809 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
2810 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2811 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2812 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
2813 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm15
2814 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
2815 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2816 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2817 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2818 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
2819 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm10
2820 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
2821 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2822 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2823 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2824 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
2825 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm4
2826 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
2827 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2828 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2829 ; AVX2-FP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
2830 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
2831 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm5
2832 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
2833 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
2834 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2835 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2836 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2837 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
2838 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
2839 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm7
2840 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm6
2841 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
2842 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2843 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2844 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2845 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
2846 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
2847 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm3
2848 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm2
2849 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
2850 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2851 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2852 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
2853 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2854 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm12
2855 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
2856 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
2857 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2858 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
2859 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2860 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm9
2861 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm0
2862 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
2863 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
2864 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm14
2865 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
2866 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
2867 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2868 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
2869 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
2870 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
2871 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2872 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
2873 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
2874 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
2875 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2876 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm2
2877 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2878 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
2879 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2880 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2881 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rsi)
2882 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2883 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rsi)
2884 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2885 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rsi)
2886 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2887 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rsi)
2888 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2889 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rdx)
2890 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2891 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rdx)
2892 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2893 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rdx)
2894 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2895 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rdx)
2896 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2897 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rcx)
2898 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2899 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rcx)
2900 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2901 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rcx)
2902 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2903 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rcx)
2904 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2905 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%r8)
2906 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
2907 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%r8)
2908 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2909 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%r8)
2910 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2911 ; AVX2-FP-NEXT: vmovaps %ymm2, (%r8)
2912 ; AVX2-FP-NEXT: vmovaps %ymm8, 96(%r9)
2913 ; AVX2-FP-NEXT: vmovaps %ymm11, 32(%r9)
2914 ; AVX2-FP-NEXT: vmovaps %ymm13, (%r9)
2915 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2916 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%r9)
2917 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
2918 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rax)
2919 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
2920 ; AVX2-FP-NEXT: vmovaps %ymm4, 64(%rax)
2921 ; AVX2-FP-NEXT: vmovaps %ymm9, 96(%rax)
2922 ; AVX2-FP-NEXT: addq $488, %rsp # imm = 0x1E8
2923 ; AVX2-FP-NEXT: vzeroupper
2924 ; AVX2-FP-NEXT: retq
2926 ; AVX2-FCP-LABEL: load_i64_stride6_vf16:
2927 ; AVX2-FCP: # %bb.0:
2928 ; AVX2-FCP-NEXT: subq $488, %rsp # imm = 0x1E8
2929 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm10
2930 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm12
2931 ; AVX2-FCP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2932 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm7
2933 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm4
2934 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2935 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm15
2936 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm3
2937 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2938 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm1
2939 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm5
2940 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2941 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm15[0],ymm3[2],ymm15[2]
2942 ; AVX2-FCP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2943 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2944 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2945 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2946 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm3
2947 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm6
2948 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2949 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2950 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2951 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2952 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2953 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2954 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm11
2955 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm9
2956 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2957 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2958 ; AVX2-FCP-NEXT: vmovaps %ymm10, %ymm4
2959 ; AVX2-FCP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2960 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2961 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2962 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2963 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm10
2964 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm8
2965 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm10[0],ymm8[2],ymm10[2]
2966 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2967 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm14
2968 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
2969 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2970 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2971 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2972 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2973 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm5
2974 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2975 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2976 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2977 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2978 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm3
2979 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2980 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2981 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2982 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2983 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm3
2984 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2985 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2986 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2987 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2988 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm1
2989 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2990 ; AVX2-FCP-NEXT: vmovaps %ymm10, %ymm14
2991 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2992 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2993 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
2994 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2995 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
2996 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm7
2997 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm3
2998 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
2999 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
3000 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3001 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm0
3002 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
3003 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
3004 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm4
3005 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm5
3006 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
3007 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3008 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3009 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
3010 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3011 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
3012 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm12
3013 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm2
3014 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
3015 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3016 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3017 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
3018 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
3019 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm13
3020 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm1
3021 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
3022 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3023 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3024 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
3025 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm15
3026 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
3027 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3028 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
3029 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3030 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
3031 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm10
3032 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
3033 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3034 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
3035 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3036 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
3037 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm4
3038 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
3039 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3040 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
3041 ; AVX2-FCP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
3042 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
3043 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm5
3044 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
3045 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
3046 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3047 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3048 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3049 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
3050 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
3051 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm7
3052 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm6
3053 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
3054 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
3055 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3056 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3057 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
3058 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
3059 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm3
3060 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm2
3061 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
3062 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
3063 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3064 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
3065 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
3066 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm12
3067 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
3068 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
3069 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
3070 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
3071 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
3072 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm9
3073 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm0
3074 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
3075 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
3076 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm14
3077 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
3078 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
3079 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
3080 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
3081 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
3082 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
3083 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
3084 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
3085 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
3086 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
3087 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
3088 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm2
3089 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
3090 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
3091 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3092 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3093 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rsi)
3094 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3095 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rsi)
3096 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3097 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rsi)
3098 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3099 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rsi)
3100 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3101 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rdx)
3102 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3103 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rdx)
3104 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3105 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rdx)
3106 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3107 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rdx)
3108 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3109 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rcx)
3110 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3111 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rcx)
3112 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3113 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rcx)
3114 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3115 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rcx)
3116 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3117 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%r8)
3118 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
3119 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%r8)
3120 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3121 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%r8)
3122 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3123 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
3124 ; AVX2-FCP-NEXT: vmovaps %ymm8, 96(%r9)
3125 ; AVX2-FCP-NEXT: vmovaps %ymm11, 32(%r9)
3126 ; AVX2-FCP-NEXT: vmovaps %ymm13, (%r9)
3127 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3128 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%r9)
3129 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3130 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rax)
3131 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
3132 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%rax)
3133 ; AVX2-FCP-NEXT: vmovaps %ymm9, 96(%rax)
3134 ; AVX2-FCP-NEXT: addq $488, %rsp # imm = 0x1E8
3135 ; AVX2-FCP-NEXT: vzeroupper
3136 ; AVX2-FCP-NEXT: retq
3138 ; AVX512-LABEL: load_i64_stride6_vf16:
3139 ; AVX512: # %bb.0:
3140 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
3141 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm2
3142 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm1
3143 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm3
3144 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm5
3145 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm0
3146 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm4
3147 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm8
3148 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm12
3149 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm9
3150 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm11
3151 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm10
3152 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm13
3153 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3154 ; AVX512-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3155 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm15
3156 ; AVX512-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3157 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3158 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm7
3159 ; AVX512-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3160 ; AVX512-NEXT: movb $56, %dil
3161 ; AVX512-NEXT: kmovw %edi, %k1
3162 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3163 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3164 ; AVX512-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3165 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm15
3166 ; AVX512-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3167 ; AVX512-NEXT: movb $-64, %dil
3168 ; AVX512-NEXT: kmovw %edi, %k2
3169 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3170 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm15
3171 ; AVX512-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3172 ; AVX512-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3173 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3174 ; AVX512-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3175 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3176 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3177 ; AVX512-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3178 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm17
3179 ; AVX512-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3180 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3181 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm15
3182 ; AVX512-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3183 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3184 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3185 ; AVX512-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3186 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm19
3187 ; AVX512-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3188 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3189 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm19
3190 ; AVX512-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3191 ; AVX512-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3192 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3193 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3194 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3195 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3196 ; AVX512-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3197 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm21
3198 ; AVX512-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3199 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3200 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm19
3201 ; AVX512-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3202 ; AVX512-NEXT: movb $24, %dil
3203 ; AVX512-NEXT: kmovw %edi, %k2
3204 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3205 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3206 ; AVX512-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3207 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm22
3208 ; AVX512-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3209 ; AVX512-NEXT: movb $-32, %dil
3210 ; AVX512-NEXT: kmovw %edi, %k1
3211 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3212 ; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3213 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3214 ; AVX512-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3215 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3216 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3217 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3218 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3219 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm23
3220 ; AVX512-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3221 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3222 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm21
3223 ; AVX512-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3224 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3225 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3226 ; AVX512-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3227 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm24
3228 ; AVX512-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3229 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3230 ; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3231 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3232 ; AVX512-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3233 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3234 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3235 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3236 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3237 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm23
3238 ; AVX512-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3239 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3240 ; AVX512-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3241 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm25
3242 ; AVX512-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3243 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm26
3244 ; AVX512-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3245 ; AVX512-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3246 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3247 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3248 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3249 ; AVX512-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3250 ; AVX512-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3251 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3252 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3253 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3254 ; AVX512-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3255 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3256 ; AVX512-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3257 ; AVX512-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3258 ; AVX512-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3259 ; AVX512-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3260 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3261 ; AVX512-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3262 ; AVX512-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3263 ; AVX512-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3264 ; AVX512-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3265 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3266 ; AVX512-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3267 ; AVX512-NEXT: vmovdqa64 %zmm7, (%rsi)
3268 ; AVX512-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3269 ; AVX512-NEXT: vmovdqa64 %zmm15, (%rdx)
3270 ; AVX512-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3271 ; AVX512-NEXT: vmovdqa64 %zmm19, (%rcx)
3272 ; AVX512-NEXT: vmovdqa64 %zmm20, 64(%r8)
3273 ; AVX512-NEXT: vmovdqa64 %zmm21, (%r8)
3274 ; AVX512-NEXT: vmovdqa64 %zmm16, 64(%r9)
3275 ; AVX512-NEXT: vmovdqa64 %zmm25, (%r9)
3276 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%rax)
3277 ; AVX512-NEXT: vmovdqa64 %zmm9, (%rax)
3278 ; AVX512-NEXT: vzeroupper
3279 ; AVX512-NEXT: retq
3281 ; AVX512-FCP-LABEL: load_i64_stride6_vf16:
3282 ; AVX512-FCP: # %bb.0:
3283 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3284 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3285 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3286 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3287 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3288 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3289 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3290 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3291 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3292 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3293 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3294 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3295 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3296 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3297 ; AVX512-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3298 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3299 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3300 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3301 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3302 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3303 ; AVX512-FCP-NEXT: movb $56, %dil
3304 ; AVX512-FCP-NEXT: kmovw %edi, %k1
3305 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3306 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3307 ; AVX512-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3308 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3309 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3310 ; AVX512-FCP-NEXT: movb $-64, %dil
3311 ; AVX512-FCP-NEXT: kmovw %edi, %k2
3312 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3313 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3314 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3315 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3316 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3317 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3318 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3319 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3320 ; AVX512-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3321 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3322 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3323 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3324 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3325 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3326 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3327 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3328 ; AVX512-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3329 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3330 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3331 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3332 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3333 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3334 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3335 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3336 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3337 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3338 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3339 ; AVX512-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3340 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3341 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3342 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3343 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3344 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3345 ; AVX512-FCP-NEXT: movb $24, %dil
3346 ; AVX512-FCP-NEXT: kmovw %edi, %k2
3347 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3348 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3349 ; AVX512-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3350 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3351 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3352 ; AVX512-FCP-NEXT: movb $-32, %dil
3353 ; AVX512-FCP-NEXT: kmovw %edi, %k1
3354 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3355 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3356 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3357 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3358 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3359 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3360 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3361 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3362 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3363 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3364 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3365 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3366 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3367 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3368 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3369 ; AVX512-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3370 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3371 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3372 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3373 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3374 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3375 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3376 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3377 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3378 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3379 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3380 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3381 ; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3382 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3383 ; AVX512-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3384 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3385 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3386 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3387 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3388 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3389 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3390 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3391 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3392 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3393 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3394 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3395 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3396 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3397 ; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3398 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3399 ; AVX512-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3400 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3401 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3402 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3403 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3404 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3405 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3406 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3407 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3408 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3410 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3411 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3412 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3413 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3414 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3415 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3416 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3417 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3418 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3419 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3420 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3421 ; AVX512-FCP-NEXT: vzeroupper
3422 ; AVX512-FCP-NEXT: retq
3424 ; AVX512DQ-LABEL: load_i64_stride6_vf16:
3425 ; AVX512DQ: # %bb.0:
3426 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
3427 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm2
3428 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm1
3429 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm3
3430 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm5
3431 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm0
3432 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm4
3433 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm8
3434 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm12
3435 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm9
3436 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm11
3437 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm10
3438 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm13
3439 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3440 ; AVX512DQ-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3441 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm15
3442 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3443 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3444 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm7
3445 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3446 ; AVX512DQ-NEXT: movb $56, %dil
3447 ; AVX512DQ-NEXT: kmovw %edi, %k1
3448 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3449 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3450 ; AVX512DQ-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3451 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm15
3452 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3453 ; AVX512DQ-NEXT: movb $-64, %dil
3454 ; AVX512DQ-NEXT: kmovw %edi, %k2
3455 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3456 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm15
3457 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3458 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3459 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3460 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3461 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3462 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3463 ; AVX512DQ-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3464 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm17
3465 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3466 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3467 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm15
3468 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3469 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3470 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3471 ; AVX512DQ-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3472 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm19
3473 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3474 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3475 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm19
3476 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3477 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3478 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3479 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3480 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3481 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3482 ; AVX512DQ-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3483 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm21
3484 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3485 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3486 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm19
3487 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3488 ; AVX512DQ-NEXT: movb $24, %dil
3489 ; AVX512DQ-NEXT: kmovw %edi, %k2
3490 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3491 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3492 ; AVX512DQ-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3493 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm22
3494 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3495 ; AVX512DQ-NEXT: movb $-32, %dil
3496 ; AVX512DQ-NEXT: kmovw %edi, %k1
3497 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3498 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3499 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3500 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3501 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3502 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3503 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3504 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3505 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm23
3506 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3507 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3508 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm21
3509 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3510 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3511 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3512 ; AVX512DQ-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3513 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm24
3514 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3515 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3516 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3517 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3518 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3519 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3520 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3521 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3522 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3523 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm23
3524 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3525 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3526 ; AVX512DQ-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3527 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm25
3528 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3529 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm26
3530 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3531 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3532 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3533 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3534 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3535 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3536 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3537 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3538 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3539 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3540 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3541 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3542 ; AVX512DQ-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3543 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3544 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3545 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3546 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3547 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3548 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3549 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3550 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3551 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3552 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3553 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, (%rsi)
3554 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3555 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, (%rdx)
3556 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3557 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, (%rcx)
3558 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 64(%r8)
3559 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, (%r8)
3560 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 64(%r9)
3561 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, (%r9)
3562 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rax)
3563 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%rax)
3564 ; AVX512DQ-NEXT: vzeroupper
3565 ; AVX512DQ-NEXT: retq
3567 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf16:
3568 ; AVX512DQ-FCP: # %bb.0:
3569 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3570 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3571 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3572 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3573 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3574 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3575 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3576 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3577 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3578 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3579 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3580 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3581 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3582 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3583 ; AVX512DQ-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3584 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3585 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3586 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3587 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3588 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3589 ; AVX512DQ-FCP-NEXT: movb $56, %dil
3590 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
3591 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3592 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3593 ; AVX512DQ-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3594 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3595 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3596 ; AVX512DQ-FCP-NEXT: movb $-64, %dil
3597 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
3598 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3599 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3600 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3601 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3602 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3603 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3604 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3605 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3606 ; AVX512DQ-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3607 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3608 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3609 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3610 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3611 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3612 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3613 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3614 ; AVX512DQ-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3615 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3616 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3617 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3618 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3619 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3620 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3621 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3622 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3623 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3624 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3625 ; AVX512DQ-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3626 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3627 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3628 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3629 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3630 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3631 ; AVX512DQ-FCP-NEXT: movb $24, %dil
3632 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
3633 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3634 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3635 ; AVX512DQ-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3636 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3637 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3638 ; AVX512DQ-FCP-NEXT: movb $-32, %dil
3639 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
3640 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3641 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3642 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3643 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3644 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3645 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3646 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3647 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3648 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3649 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3650 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3651 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3652 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3653 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3654 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3655 ; AVX512DQ-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3656 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3657 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3658 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3659 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3660 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3661 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3662 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3663 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3664 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3665 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3666 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3667 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3668 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3669 ; AVX512DQ-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3670 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3671 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3672 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3673 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3674 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3675 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3676 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3677 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3678 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3679 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3680 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3681 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3682 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3683 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3684 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3685 ; AVX512DQ-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3686 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3687 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3688 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3689 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3690 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3691 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3692 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3693 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3694 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3695 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3696 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3697 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3698 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3699 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3700 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3701 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3702 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3703 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3704 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3705 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3706 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3707 ; AVX512DQ-FCP-NEXT: vzeroupper
3708 ; AVX512DQ-FCP-NEXT: retq
3710 ; AVX512BW-LABEL: load_i64_stride6_vf16:
3711 ; AVX512BW: # %bb.0:
3712 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3713 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm2
3714 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm1
3715 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm3
3716 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm5
3717 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm0
3718 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm4
3719 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm8
3720 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm12
3721 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm9
3722 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm11
3723 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm10
3724 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm13
3725 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3726 ; AVX512BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3727 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm15
3728 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3729 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3730 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm7
3731 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3732 ; AVX512BW-NEXT: movb $56, %dil
3733 ; AVX512BW-NEXT: kmovd %edi, %k1
3734 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3735 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3736 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3737 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm15
3738 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3739 ; AVX512BW-NEXT: movb $-64, %dil
3740 ; AVX512BW-NEXT: kmovd %edi, %k2
3741 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3742 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm15
3743 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3744 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3745 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3746 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3747 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3748 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3749 ; AVX512BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3750 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm17
3751 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3752 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3753 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm15
3754 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3755 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3756 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3757 ; AVX512BW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3758 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm19
3759 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3760 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3761 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm19
3762 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3763 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3764 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3765 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3766 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3767 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3768 ; AVX512BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3769 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm21
3770 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3771 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3772 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm19
3773 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3774 ; AVX512BW-NEXT: movb $24, %dil
3775 ; AVX512BW-NEXT: kmovd %edi, %k2
3776 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3777 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3778 ; AVX512BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3779 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm22
3780 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3781 ; AVX512BW-NEXT: movb $-32, %dil
3782 ; AVX512BW-NEXT: kmovd %edi, %k1
3783 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3784 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3785 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3786 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3787 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3788 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3789 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3790 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3791 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm23
3792 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3793 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3794 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm21
3795 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3796 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3797 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3798 ; AVX512BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3799 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm24
3800 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3801 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3802 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3803 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3804 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3805 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3806 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3807 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3808 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3809 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm23
3810 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3811 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3812 ; AVX512BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3813 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm25
3814 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3815 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm26
3816 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3817 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3818 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3819 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3820 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3821 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3822 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3823 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3824 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3825 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3826 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3827 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3828 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3829 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3830 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3831 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3832 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3833 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3834 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3835 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3836 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3837 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3838 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3839 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rsi)
3840 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3841 ; AVX512BW-NEXT: vmovdqa64 %zmm15, (%rdx)
3842 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3843 ; AVX512BW-NEXT: vmovdqa64 %zmm19, (%rcx)
3844 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 64(%r8)
3845 ; AVX512BW-NEXT: vmovdqa64 %zmm21, (%r8)
3846 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 64(%r9)
3847 ; AVX512BW-NEXT: vmovdqa64 %zmm25, (%r9)
3848 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rax)
3849 ; AVX512BW-NEXT: vmovdqa64 %zmm9, (%rax)
3850 ; AVX512BW-NEXT: vzeroupper
3851 ; AVX512BW-NEXT: retq
3853 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf16:
3854 ; AVX512BW-FCP: # %bb.0:
3855 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3856 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3857 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3858 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3859 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3860 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3861 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3862 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3863 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3864 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3865 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3866 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3867 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3868 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3869 ; AVX512BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3870 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3871 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3872 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3873 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3874 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3875 ; AVX512BW-FCP-NEXT: movb $56, %dil
3876 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
3877 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3878 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3879 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3880 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3881 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3882 ; AVX512BW-FCP-NEXT: movb $-64, %dil
3883 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
3884 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3885 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3886 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3887 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3888 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3889 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3890 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3891 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3892 ; AVX512BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3893 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3894 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3895 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3896 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3897 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3898 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3899 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3900 ; AVX512BW-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3901 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3902 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3903 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3904 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3905 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3906 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3907 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3908 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3909 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3910 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3911 ; AVX512BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3912 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3913 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3914 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3915 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3916 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3917 ; AVX512BW-FCP-NEXT: movb $24, %dil
3918 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
3919 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3920 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3921 ; AVX512BW-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3922 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3923 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3924 ; AVX512BW-FCP-NEXT: movb $-32, %dil
3925 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
3926 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3927 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3928 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3929 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3930 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3931 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3932 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3933 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3934 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3935 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3936 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3937 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3938 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3939 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3940 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3941 ; AVX512BW-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3942 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3943 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3944 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3945 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3946 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3947 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3948 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3949 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3950 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3951 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3952 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3953 ; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3954 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3955 ; AVX512BW-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3956 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3957 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3958 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3959 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3960 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3961 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3962 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3963 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3964 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3965 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3966 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3967 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3968 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3969 ; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3970 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3971 ; AVX512BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3972 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3973 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3974 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3975 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3976 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3977 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3978 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3979 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3980 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3981 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3982 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3983 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3984 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3985 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3986 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3987 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3988 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3989 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3990 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3991 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3992 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3993 ; AVX512BW-FCP-NEXT: vzeroupper
3994 ; AVX512BW-FCP-NEXT: retq
3996 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf16:
3997 ; AVX512DQ-BW: # %bb.0:
3998 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3999 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm2
4000 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm1
4001 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm3
4002 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm5
4003 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm0
4004 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm4
4005 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm8
4006 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm12
4007 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm9
4008 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm11
4009 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm10
4010 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm13
4011 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
4012 ; AVX512DQ-BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
4013 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm15
4014 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
4015 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
4016 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm7
4017 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
4018 ; AVX512DQ-BW-NEXT: movb $56, %dil
4019 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
4020 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
4021 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
4022 ; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4023 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm15
4024 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
4025 ; AVX512DQ-BW-NEXT: movb $-64, %dil
4026 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
4027 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
4028 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm15
4029 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
4030 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
4031 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
4032 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
4033 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
4034 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
4035 ; AVX512DQ-BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
4036 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm17
4037 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
4038 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
4039 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm15
4040 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
4041 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
4042 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
4043 ; AVX512DQ-BW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4044 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm19
4045 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
4046 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
4047 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm19
4048 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
4049 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
4050 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
4051 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
4052 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
4053 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
4054 ; AVX512DQ-BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4055 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm21
4056 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
4057 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
4058 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm19
4059 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
4060 ; AVX512DQ-BW-NEXT: movb $24, %dil
4061 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
4062 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
4063 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
4064 ; AVX512DQ-BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
4065 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm22
4066 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
4067 ; AVX512DQ-BW-NEXT: movb $-32, %dil
4068 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
4069 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
4070 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
4071 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
4072 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
4073 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
4074 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
4075 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
4076 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4077 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm23
4078 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
4079 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
4080 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm21
4081 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
4082 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
4083 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
4084 ; AVX512DQ-BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
4085 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm24
4086 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
4087 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
4088 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
4089 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
4090 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
4091 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
4092 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
4093 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
4094 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4095 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm23
4096 ; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
4097 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
4098 ; AVX512DQ-BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
4099 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm25
4100 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
4101 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm26
4102 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
4103 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
4104 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4105 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
4106 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
4107 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
4108 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
4109 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
4110 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
4111 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4112 ; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
4113 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
4114 ; AVX512DQ-BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
4115 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
4116 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
4117 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
4118 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
4119 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
4120 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
4121 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
4122 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
4123 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
4124 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
4125 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, (%rsi)
4126 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 64(%rdx)
4127 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, (%rdx)
4128 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%rcx)
4129 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, (%rcx)
4130 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 64(%r8)
4131 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, (%r8)
4132 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 64(%r9)
4133 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, (%r9)
4134 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%rax)
4135 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, (%rax)
4136 ; AVX512DQ-BW-NEXT: vzeroupper
4137 ; AVX512DQ-BW-NEXT: retq
4139 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf16:
4140 ; AVX512DQ-BW-FCP: # %bb.0:
4141 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
4142 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
4143 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
4144 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
4145 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
4146 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
4147 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
4148 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
4149 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
4150 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
4151 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
4152 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
4153 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
4154 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
4155 ; AVX512DQ-BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
4156 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
4157 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
4158 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
4159 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
4160 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
4161 ; AVX512DQ-BW-FCP-NEXT: movb $56, %dil
4162 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
4163 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
4164 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
4165 ; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4166 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
4167 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
4168 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %dil
4169 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
4170 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
4171 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
4172 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
4173 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
4174 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
4175 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
4176 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
4177 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
4178 ; AVX512DQ-BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
4179 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
4180 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
4181 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
4182 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
4183 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
4184 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
4185 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
4186 ; AVX512DQ-BW-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4187 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
4188 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
4189 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
4190 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
4191 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
4192 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
4193 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
4194 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
4195 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
4196 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
4197 ; AVX512DQ-BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4198 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
4199 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
4200 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
4201 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
4202 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
4203 ; AVX512DQ-BW-FCP-NEXT: movb $24, %dil
4204 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
4205 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
4206 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
4207 ; AVX512DQ-BW-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
4208 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
4209 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
4210 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %dil
4211 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
4212 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
4213 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
4214 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
4215 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
4216 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
4217 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
4218 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
4219 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4220 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
4221 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
4222 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
4223 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
4224 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
4225 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
4226 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
4227 ; AVX512DQ-BW-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
4228 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
4229 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
4230 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
4231 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
4232 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
4233 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
4234 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
4235 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
4236 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
4237 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4238 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
4239 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
4240 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
4241 ; AVX512DQ-BW-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
4242 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
4243 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
4244 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
4245 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
4246 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
4247 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4248 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
4249 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
4250 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
4251 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
4252 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
4253 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
4254 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4255 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
4256 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
4257 ; AVX512DQ-BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
4258 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
4259 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
4260 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
4261 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
4262 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
4263 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
4264 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
4265 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
4266 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
4267 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
4268 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
4269 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
4270 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
4271 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
4272 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
4273 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
4274 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
4275 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
4276 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
4277 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
4278 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
4279 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
4280 ; AVX512DQ-BW-FCP-NEXT: retq
4281 %wide.vec = load <96 x i64>, ptr %in.vec, align 64
4282 %strided.vec0 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90>
4283 %strided.vec1 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91>
4284 %strided.vec2 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92>
4285 %strided.vec3 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93>
4286 %strided.vec4 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94>
4287 %strided.vec5 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95>
4288 store <16 x i64> %strided.vec0, ptr %out.vec0, align 64
4289 store <16 x i64> %strided.vec1, ptr %out.vec1, align 64
4290 store <16 x i64> %strided.vec2, ptr %out.vec2, align 64
4291 store <16 x i64> %strided.vec3, ptr %out.vec3, align 64
4292 store <16 x i64> %strided.vec4, ptr %out.vec4, align 64
4293 store <16 x i64> %strided.vec5, ptr %out.vec5, align 64
4297 define void @load_i64_stride6_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
4298 ; SSE-LABEL: load_i64_stride6_vf32:
4300 ; SSE-NEXT: subq $1176, %rsp # imm = 0x498
4301 ; SSE-NEXT: movaps 624(%rdi), %xmm0
4302 ; SSE-NEXT: movaps 576(%rdi), %xmm9
4303 ; SSE-NEXT: movaps 240(%rdi), %xmm3
4304 ; SSE-NEXT: movaps 192(%rdi), %xmm8
4305 ; SSE-NEXT: movaps 720(%rdi), %xmm1
4306 ; SSE-NEXT: movaps 672(%rdi), %xmm11
4307 ; SSE-NEXT: movaps 336(%rdi), %xmm5
4308 ; SSE-NEXT: movaps 288(%rdi), %xmm10
4309 ; SSE-NEXT: movaps 432(%rdi), %xmm4
4310 ; SSE-NEXT: movaps 384(%rdi), %xmm12
4311 ; SSE-NEXT: movaps 912(%rdi), %xmm2
4312 ; SSE-NEXT: movaps 528(%rdi), %xmm6
4313 ; SSE-NEXT: movaps 480(%rdi), %xmm14
4314 ; SSE-NEXT: movaps 144(%rdi), %xmm7
4315 ; SSE-NEXT: movaps 96(%rdi), %xmm13
4316 ; SSE-NEXT: movaps %xmm13, %xmm15
4317 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm7[0]
4318 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4319 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm7[1]
4320 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4321 ; SSE-NEXT: movaps %xmm10, %xmm7
4322 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm5[0]
4323 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4324 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm5[1]
4325 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4326 ; SSE-NEXT: movaps %xmm8, %xmm5
4327 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm3[0]
4328 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4329 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm3[1]
4330 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4331 ; SSE-NEXT: movaps %xmm14, %xmm3
4332 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
4333 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4334 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm6[1]
4335 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4336 ; SSE-NEXT: movaps %xmm12, %xmm3
4337 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm4[0]
4338 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4339 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm4[1]
4340 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4341 ; SSE-NEXT: movaps %xmm11, %xmm3
4342 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm1[0]
4343 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4344 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm1[1]
4345 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4346 ; SSE-NEXT: movaps %xmm9, %xmm1
4347 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4348 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4349 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
4350 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4351 ; SSE-NEXT: movaps 864(%rdi), %xmm0
4352 ; SSE-NEXT: movaps %xmm0, %xmm1
4353 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4354 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4355 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
4356 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4357 ; SSE-NEXT: movaps 816(%rdi), %xmm0
4358 ; SSE-NEXT: movaps 768(%rdi), %xmm1
4359 ; SSE-NEXT: movaps %xmm1, %xmm2
4360 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4361 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4362 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4363 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4364 ; SSE-NEXT: movaps 1104(%rdi), %xmm0
4365 ; SSE-NEXT: movaps 1056(%rdi), %xmm1
4366 ; SSE-NEXT: movaps %xmm1, %xmm2
4367 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4368 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4369 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4370 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4371 ; SSE-NEXT: movaps 1008(%rdi), %xmm0
4372 ; SSE-NEXT: movaps 960(%rdi), %xmm1
4373 ; SSE-NEXT: movaps %xmm1, %xmm2
4374 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4375 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4376 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4377 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4378 ; SSE-NEXT: movaps 1296(%rdi), %xmm0
4379 ; SSE-NEXT: movaps 1248(%rdi), %xmm1
4380 ; SSE-NEXT: movaps %xmm1, %xmm2
4381 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4382 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4383 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4384 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4385 ; SSE-NEXT: movaps 1200(%rdi), %xmm0
4386 ; SSE-NEXT: movaps 1152(%rdi), %xmm1
4387 ; SSE-NEXT: movaps %xmm1, %xmm2
4388 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4389 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4390 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4391 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4392 ; SSE-NEXT: movaps 1488(%rdi), %xmm0
4393 ; SSE-NEXT: movaps 1440(%rdi), %xmm1
4394 ; SSE-NEXT: movaps %xmm1, %xmm2
4395 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4396 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4397 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4398 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4399 ; SSE-NEXT: movaps 1392(%rdi), %xmm0
4400 ; SSE-NEXT: movaps 1344(%rdi), %xmm1
4401 ; SSE-NEXT: movaps %xmm1, %xmm2
4402 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4403 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4404 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4405 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4406 ; SSE-NEXT: movaps (%rdi), %xmm1
4407 ; SSE-NEXT: movaps 48(%rdi), %xmm0
4408 ; SSE-NEXT: movaps %xmm1, %xmm2
4409 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4410 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4411 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4412 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4413 ; SSE-NEXT: movaps 64(%rdi), %xmm0
4414 ; SSE-NEXT: movaps 16(%rdi), %xmm1
4415 ; SSE-NEXT: movaps %xmm1, %xmm2
4416 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4417 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4418 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4419 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4420 ; SSE-NEXT: movaps 160(%rdi), %xmm0
4421 ; SSE-NEXT: movaps 112(%rdi), %xmm1
4422 ; SSE-NEXT: movaps %xmm1, %xmm2
4423 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4424 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4425 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4426 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4427 ; SSE-NEXT: movaps 256(%rdi), %xmm0
4428 ; SSE-NEXT: movaps 208(%rdi), %xmm1
4429 ; SSE-NEXT: movaps %xmm1, %xmm2
4430 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4431 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4432 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4433 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4434 ; SSE-NEXT: movaps 352(%rdi), %xmm0
4435 ; SSE-NEXT: movaps 304(%rdi), %xmm1
4436 ; SSE-NEXT: movaps %xmm1, %xmm2
4437 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4438 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4439 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4440 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4441 ; SSE-NEXT: movaps 448(%rdi), %xmm0
4442 ; SSE-NEXT: movaps 400(%rdi), %xmm1
4443 ; SSE-NEXT: movaps %xmm1, %xmm2
4444 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4445 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4446 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4447 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4448 ; SSE-NEXT: movaps 544(%rdi), %xmm0
4449 ; SSE-NEXT: movaps 496(%rdi), %xmm1
4450 ; SSE-NEXT: movaps %xmm1, %xmm2
4451 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4452 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4453 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4454 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4455 ; SSE-NEXT: movaps 640(%rdi), %xmm0
4456 ; SSE-NEXT: movaps 592(%rdi), %xmm1
4457 ; SSE-NEXT: movaps %xmm1, %xmm2
4458 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4459 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4460 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4461 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4462 ; SSE-NEXT: movaps 736(%rdi), %xmm0
4463 ; SSE-NEXT: movaps 688(%rdi), %xmm1
4464 ; SSE-NEXT: movaps %xmm1, %xmm2
4465 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4466 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4467 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4468 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4469 ; SSE-NEXT: movaps 832(%rdi), %xmm0
4470 ; SSE-NEXT: movaps 784(%rdi), %xmm1
4471 ; SSE-NEXT: movaps %xmm1, %xmm2
4472 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4473 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4474 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4475 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4476 ; SSE-NEXT: movaps 928(%rdi), %xmm0
4477 ; SSE-NEXT: movaps 880(%rdi), %xmm1
4478 ; SSE-NEXT: movaps %xmm1, %xmm2
4479 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4480 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4481 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4482 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4483 ; SSE-NEXT: movaps 1024(%rdi), %xmm0
4484 ; SSE-NEXT: movaps 976(%rdi), %xmm1
4485 ; SSE-NEXT: movaps %xmm1, %xmm2
4486 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4487 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4488 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4489 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4490 ; SSE-NEXT: movaps 1120(%rdi), %xmm0
4491 ; SSE-NEXT: movaps 1072(%rdi), %xmm1
4492 ; SSE-NEXT: movaps %xmm1, %xmm2
4493 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4494 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4495 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4496 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4497 ; SSE-NEXT: movaps 1216(%rdi), %xmm0
4498 ; SSE-NEXT: movaps 1168(%rdi), %xmm1
4499 ; SSE-NEXT: movaps %xmm1, %xmm2
4500 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4501 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4502 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4503 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4504 ; SSE-NEXT: movaps 1312(%rdi), %xmm0
4505 ; SSE-NEXT: movaps 1264(%rdi), %xmm1
4506 ; SSE-NEXT: movaps %xmm1, %xmm2
4507 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4508 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4509 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4510 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4511 ; SSE-NEXT: movaps 1408(%rdi), %xmm0
4512 ; SSE-NEXT: movaps 1360(%rdi), %xmm1
4513 ; SSE-NEXT: movaps %xmm1, %xmm2
4514 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4515 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4516 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4517 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4518 ; SSE-NEXT: movaps 1504(%rdi), %xmm0
4519 ; SSE-NEXT: movaps 1456(%rdi), %xmm1
4520 ; SSE-NEXT: movaps %xmm1, %xmm2
4521 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4522 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4523 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4524 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4525 ; SSE-NEXT: movaps 80(%rdi), %xmm0
4526 ; SSE-NEXT: movaps 32(%rdi), %xmm1
4527 ; SSE-NEXT: movaps %xmm1, %xmm2
4528 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4529 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4530 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4531 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4532 ; SSE-NEXT: movaps 176(%rdi), %xmm0
4533 ; SSE-NEXT: movaps 128(%rdi), %xmm1
4534 ; SSE-NEXT: movaps %xmm1, %xmm2
4535 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4536 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4537 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4538 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4539 ; SSE-NEXT: movaps 272(%rdi), %xmm0
4540 ; SSE-NEXT: movaps 224(%rdi), %xmm1
4541 ; SSE-NEXT: movaps %xmm1, %xmm2
4542 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4543 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4544 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4545 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4546 ; SSE-NEXT: movaps 368(%rdi), %xmm0
4547 ; SSE-NEXT: movaps 320(%rdi), %xmm1
4548 ; SSE-NEXT: movaps %xmm1, %xmm2
4549 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4550 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4551 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4552 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4553 ; SSE-NEXT: movaps 464(%rdi), %xmm0
4554 ; SSE-NEXT: movaps 416(%rdi), %xmm1
4555 ; SSE-NEXT: movaps %xmm1, %xmm2
4556 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4557 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4558 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4559 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4560 ; SSE-NEXT: movaps 560(%rdi), %xmm0
4561 ; SSE-NEXT: movaps 512(%rdi), %xmm1
4562 ; SSE-NEXT: movaps %xmm1, %xmm2
4563 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4564 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4565 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4566 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
4567 ; SSE-NEXT: movaps 656(%rdi), %xmm0
4568 ; SSE-NEXT: movaps 608(%rdi), %xmm1
4569 ; SSE-NEXT: movaps %xmm1, %xmm2
4570 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4571 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4572 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4573 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4574 ; SSE-NEXT: movaps 752(%rdi), %xmm0
4575 ; SSE-NEXT: movaps 704(%rdi), %xmm14
4576 ; SSE-NEXT: movaps %xmm14, %xmm1
4577 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4578 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4579 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
4580 ; SSE-NEXT: movaps 848(%rdi), %xmm0
4581 ; SSE-NEXT: movaps 800(%rdi), %xmm15
4582 ; SSE-NEXT: movaps %xmm15, %xmm1
4583 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4584 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4585 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
4586 ; SSE-NEXT: movaps 944(%rdi), %xmm0
4587 ; SSE-NEXT: movaps 896(%rdi), %xmm13
4588 ; SSE-NEXT: movaps %xmm13, %xmm1
4589 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4590 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4591 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
4592 ; SSE-NEXT: movaps 1040(%rdi), %xmm0
4593 ; SSE-NEXT: movaps 992(%rdi), %xmm10
4594 ; SSE-NEXT: movaps %xmm10, %xmm12
4595 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm0[0]
4596 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
4597 ; SSE-NEXT: movaps 1136(%rdi), %xmm0
4598 ; SSE-NEXT: movaps 1088(%rdi), %xmm9
4599 ; SSE-NEXT: movaps %xmm9, %xmm11
4600 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
4601 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
4602 ; SSE-NEXT: movaps 1232(%rdi), %xmm0
4603 ; SSE-NEXT: movaps 1184(%rdi), %xmm7
4604 ; SSE-NEXT: movaps %xmm7, %xmm8
4605 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
4606 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
4607 ; SSE-NEXT: movaps 1328(%rdi), %xmm0
4608 ; SSE-NEXT: movaps 1280(%rdi), %xmm5
4609 ; SSE-NEXT: movaps %xmm5, %xmm6
4610 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
4611 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
4612 ; SSE-NEXT: movaps 1424(%rdi), %xmm0
4613 ; SSE-NEXT: movaps 1376(%rdi), %xmm3
4614 ; SSE-NEXT: movaps %xmm3, %xmm4
4615 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
4616 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
4617 ; SSE-NEXT: movaps 1520(%rdi), %xmm0
4618 ; SSE-NEXT: movaps 1472(%rdi), %xmm1
4619 ; SSE-NEXT: movaps %xmm1, %xmm2
4620 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4621 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4622 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4623 ; SSE-NEXT: movaps %xmm0, 224(%rsi)
4624 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4625 ; SSE-NEXT: movaps %xmm0, 160(%rsi)
4626 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4627 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
4628 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4629 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
4630 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4631 ; SSE-NEXT: movaps %xmm0, 240(%rsi)
4632 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4633 ; SSE-NEXT: movaps %xmm0, 176(%rsi)
4634 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4635 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
4636 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4637 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
4638 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4639 ; SSE-NEXT: movaps %xmm0, 192(%rsi)
4640 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4641 ; SSE-NEXT: movaps %xmm0, 128(%rsi)
4642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4643 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
4644 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4645 ; SSE-NEXT: movaps %xmm0, (%rsi)
4646 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4647 ; SSE-NEXT: movaps %xmm0, 208(%rsi)
4648 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4649 ; SSE-NEXT: movaps %xmm0, 144(%rsi)
4650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4651 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
4652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4653 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
4654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4655 ; SSE-NEXT: movaps %xmm0, 224(%rdx)
4656 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4657 ; SSE-NEXT: movaps %xmm0, 240(%rdx)
4658 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4659 ; SSE-NEXT: movaps %xmm0, 192(%rdx)
4660 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4661 ; SSE-NEXT: movaps %xmm0, 208(%rdx)
4662 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4663 ; SSE-NEXT: movaps %xmm0, 160(%rdx)
4664 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4665 ; SSE-NEXT: movaps %xmm0, 176(%rdx)
4666 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4667 ; SSE-NEXT: movaps %xmm0, 128(%rdx)
4668 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4669 ; SSE-NEXT: movaps %xmm0, 144(%rdx)
4670 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4671 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
4672 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4673 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
4674 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4675 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
4676 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4677 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
4678 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4679 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
4680 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4681 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
4682 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4683 ; SSE-NEXT: movaps %xmm0, (%rdx)
4684 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4685 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
4686 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4687 ; SSE-NEXT: movaps %xmm0, 240(%rcx)
4688 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4689 ; SSE-NEXT: movaps %xmm0, 224(%rcx)
4690 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4691 ; SSE-NEXT: movaps %xmm0, 208(%rcx)
4692 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4693 ; SSE-NEXT: movaps %xmm0, 192(%rcx)
4694 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4695 ; SSE-NEXT: movaps %xmm0, 176(%rcx)
4696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4697 ; SSE-NEXT: movaps %xmm0, 160(%rcx)
4698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4699 ; SSE-NEXT: movaps %xmm0, 144(%rcx)
4700 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4701 ; SSE-NEXT: movaps %xmm0, 128(%rcx)
4702 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4703 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
4704 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4705 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
4706 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4707 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
4708 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4709 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
4710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4711 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
4712 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4713 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
4714 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4715 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
4716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4717 ; SSE-NEXT: movaps %xmm0, (%rcx)
4718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4719 ; SSE-NEXT: movaps %xmm0, 240(%r8)
4720 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4721 ; SSE-NEXT: movaps %xmm0, 224(%r8)
4722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4723 ; SSE-NEXT: movaps %xmm0, 208(%r8)
4724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4725 ; SSE-NEXT: movaps %xmm0, 192(%r8)
4726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4727 ; SSE-NEXT: movaps %xmm0, 176(%r8)
4728 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4729 ; SSE-NEXT: movaps %xmm0, 160(%r8)
4730 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4731 ; SSE-NEXT: movaps %xmm0, 144(%r8)
4732 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4733 ; SSE-NEXT: movaps %xmm0, 128(%r8)
4734 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4735 ; SSE-NEXT: movaps %xmm0, 112(%r8)
4736 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4737 ; SSE-NEXT: movaps %xmm0, 96(%r8)
4738 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4739 ; SSE-NEXT: movaps %xmm0, 80(%r8)
4740 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4741 ; SSE-NEXT: movaps %xmm0, 64(%r8)
4742 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4743 ; SSE-NEXT: movaps %xmm0, 48(%r8)
4744 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4745 ; SSE-NEXT: movaps %xmm0, 32(%r8)
4746 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4747 ; SSE-NEXT: movaps %xmm0, 16(%r8)
4748 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4749 ; SSE-NEXT: movaps %xmm0, (%r8)
4750 ; SSE-NEXT: movaps %xmm2, 240(%r9)
4751 ; SSE-NEXT: movaps %xmm4, 224(%r9)
4752 ; SSE-NEXT: movaps %xmm6, 208(%r9)
4753 ; SSE-NEXT: movaps %xmm8, 192(%r9)
4754 ; SSE-NEXT: movaps %xmm11, 176(%r9)
4755 ; SSE-NEXT: movaps %xmm12, 160(%r9)
4756 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4757 ; SSE-NEXT: movaps %xmm0, 144(%r9)
4758 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4759 ; SSE-NEXT: movaps %xmm0, 128(%r9)
4760 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4761 ; SSE-NEXT: movaps %xmm0, 112(%r9)
4762 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4763 ; SSE-NEXT: movaps %xmm0, 96(%r9)
4764 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4765 ; SSE-NEXT: movaps %xmm0, 80(%r9)
4766 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4767 ; SSE-NEXT: movaps %xmm0, 64(%r9)
4768 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4769 ; SSE-NEXT: movaps %xmm0, 48(%r9)
4770 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4771 ; SSE-NEXT: movaps %xmm0, 32(%r9)
4772 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4773 ; SSE-NEXT: movaps %xmm0, 16(%r9)
4774 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4775 ; SSE-NEXT: movaps %xmm0, (%r9)
4776 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4777 ; SSE-NEXT: movaps %xmm1, 240(%rax)
4778 ; SSE-NEXT: movaps %xmm3, 224(%rax)
4779 ; SSE-NEXT: movaps %xmm5, 208(%rax)
4780 ; SSE-NEXT: movaps %xmm7, 192(%rax)
4781 ; SSE-NEXT: movaps %xmm9, 176(%rax)
4782 ; SSE-NEXT: movaps %xmm10, 160(%rax)
4783 ; SSE-NEXT: movaps %xmm13, 144(%rax)
4784 ; SSE-NEXT: movaps %xmm15, 128(%rax)
4785 ; SSE-NEXT: movaps %xmm14, 112(%rax)
4786 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4787 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4788 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4789 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4790 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4791 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4792 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4793 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4794 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4795 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4796 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4797 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4798 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4799 ; SSE-NEXT: movaps %xmm0, (%rax)
4800 ; SSE-NEXT: addq $1176, %rsp # imm = 0x498
4803 ; AVX-LABEL: load_i64_stride6_vf32:
4805 ; AVX-NEXT: subq $1624, %rsp # imm = 0x658
4806 ; AVX-NEXT: vmovaps 1088(%rdi), %ymm2
4807 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4808 ; AVX-NEXT: vmovaps 704(%rdi), %ymm3
4809 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4810 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
4811 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4812 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm0
4813 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4814 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4815 ; AVX-NEXT: vmovaps 240(%rdi), %xmm4
4816 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4817 ; AVX-NEXT: vmovaps 192(%rdi), %xmm1
4818 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4819 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
4820 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4821 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4822 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm0
4823 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4824 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
4825 ; AVX-NEXT: vmovaps 624(%rdi), %xmm3
4826 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4827 ; AVX-NEXT: vmovaps 576(%rdi), %xmm1
4828 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4829 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
4830 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4831 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4832 ; AVX-NEXT: vinsertf128 $1, 1056(%rdi), %ymm0, %ymm0
4833 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4834 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
4835 ; AVX-NEXT: vmovaps 1008(%rdi), %xmm2
4836 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4837 ; AVX-NEXT: vmovaps 960(%rdi), %xmm1
4838 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4839 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4840 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4841 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4842 ; AVX-NEXT: vmovaps 1472(%rdi), %ymm1
4843 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4844 ; AVX-NEXT: vinsertf128 $1, 1440(%rdi), %ymm0, %ymm0
4845 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4846 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4847 ; AVX-NEXT: vmovaps 1392(%rdi), %xmm2
4848 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4849 ; AVX-NEXT: vmovaps 1344(%rdi), %xmm1
4850 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4851 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4852 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4853 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4854 ; AVX-NEXT: vmovaps 128(%rdi), %ymm1
4855 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4856 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm0
4857 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
4858 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4859 ; AVX-NEXT: vmovaps (%rdi), %xmm14
4860 ; AVX-NEXT: vmovaps 48(%rdi), %xmm13
4861 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
4862 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4863 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4864 ; AVX-NEXT: vmovaps 512(%rdi), %ymm12
4865 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm11
4866 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
4867 ; AVX-NEXT: vmovaps 432(%rdi), %xmm10
4868 ; AVX-NEXT: vmovaps 384(%rdi), %xmm9
4869 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
4870 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4871 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4872 ; AVX-NEXT: vmovaps 896(%rdi), %ymm8
4873 ; AVX-NEXT: vinsertf128 $1, 864(%rdi), %ymm0, %ymm7
4874 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
4875 ; AVX-NEXT: vmovaps 816(%rdi), %xmm6
4876 ; AVX-NEXT: vmovaps 768(%rdi), %xmm5
4877 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
4878 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4879 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4880 ; AVX-NEXT: vmovaps 1280(%rdi), %ymm4
4881 ; AVX-NEXT: vinsertf128 $1, 1248(%rdi), %ymm0, %ymm3
4882 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
4883 ; AVX-NEXT: vmovaps 1200(%rdi), %xmm2
4884 ; AVX-NEXT: vmovaps 1152(%rdi), %xmm1
4885 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
4886 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4887 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4888 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4889 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4890 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4891 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4892 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4893 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4894 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4895 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4896 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4897 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4898 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4899 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4900 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4901 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4902 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4903 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4904 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4905 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4906 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4907 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4908 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4909 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4910 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4911 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4912 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4913 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4914 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4915 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4916 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4917 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4918 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4919 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4920 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4921 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4922 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4923 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm14[1],xmm13[1]
4924 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
4925 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4926 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
4927 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
4928 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
4929 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4930 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
4931 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
4932 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
4933 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4934 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
4935 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
4936 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4937 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4938 ; AVX-NEXT: vmovaps 288(%rdi), %ymm1
4939 ; AVX-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
4940 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm0
4941 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4942 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4943 ; AVX-NEXT: vmovaps 256(%rdi), %xmm2
4944 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4945 ; AVX-NEXT: vmovaps 208(%rdi), %xmm1
4946 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4947 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4948 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4949 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4950 ; AVX-NEXT: vmovaps 672(%rdi), %ymm1
4951 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4952 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm0
4953 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4954 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4955 ; AVX-NEXT: vmovaps 640(%rdi), %xmm2
4956 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4957 ; AVX-NEXT: vmovaps 592(%rdi), %xmm1
4958 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4959 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4960 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4961 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4962 ; AVX-NEXT: vmovaps 1056(%rdi), %ymm1
4963 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4964 ; AVX-NEXT: vinsertf128 $1, 1120(%rdi), %ymm0, %ymm0
4965 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4966 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4967 ; AVX-NEXT: vmovaps 1024(%rdi), %xmm2
4968 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4969 ; AVX-NEXT: vmovaps 976(%rdi), %xmm1
4970 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4971 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4972 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4973 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4974 ; AVX-NEXT: vmovaps 1440(%rdi), %ymm1
4975 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4976 ; AVX-NEXT: vinsertf128 $1, 1504(%rdi), %ymm0, %ymm0
4977 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4978 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4979 ; AVX-NEXT: vmovaps 1408(%rdi), %xmm14
4980 ; AVX-NEXT: vmovaps 1360(%rdi), %xmm12
4981 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm14[0]
4982 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4983 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4984 ; AVX-NEXT: vmovaps 96(%rdi), %ymm1
4985 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4986 ; AVX-NEXT: vmovaps 16(%rdi), %xmm2
4987 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4988 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm0
4989 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4990 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4991 ; AVX-NEXT: vmovaps 64(%rdi), %xmm1
4992 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4993 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
4994 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4995 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4996 ; AVX-NEXT: vmovaps 480(%rdi), %ymm11
4997 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm10
4998 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
4999 ; AVX-NEXT: vmovaps 448(%rdi), %xmm9
5000 ; AVX-NEXT: vmovaps 400(%rdi), %xmm8
5001 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
5002 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5003 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5004 ; AVX-NEXT: vmovaps 864(%rdi), %ymm7
5005 ; AVX-NEXT: vinsertf128 $1, 928(%rdi), %ymm0, %ymm6
5006 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm6[0],ymm7[2],ymm6[2]
5007 ; AVX-NEXT: vmovaps 832(%rdi), %xmm5
5008 ; AVX-NEXT: vmovaps 784(%rdi), %xmm4
5009 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm4[0],xmm5[0]
5010 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
5011 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5012 ; AVX-NEXT: vmovaps 1248(%rdi), %ymm3
5013 ; AVX-NEXT: vinsertf128 $1, 1312(%rdi), %ymm0, %ymm13
5014 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm13[0],ymm3[2],ymm13[2]
5015 ; AVX-NEXT: vmovaps 1216(%rdi), %xmm2
5016 ; AVX-NEXT: vmovaps 1168(%rdi), %xmm1
5017 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5018 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5019 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5020 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5021 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5022 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5023 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5024 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5025 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5026 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5027 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5028 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5029 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5030 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5031 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5032 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5033 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5034 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5035 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
5036 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5037 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5038 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5039 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5040 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5041 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5042 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5043 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5044 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5045 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5046 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5047 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm12 = xmm12[1],xmm14[1]
5048 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm0[4,5,6,7]
5049 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5050 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm13[1],ymm3[3],ymm13[3]
5051 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5052 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5053 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5054 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm6[1],ymm7[3],ymm6[3]
5055 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm5[1]
5056 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5057 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5058 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
5059 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm9[1]
5060 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5061 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5062 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5063 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5064 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5065 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5066 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
5067 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
5068 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5069 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5070 ; AVX-NEXT: vmovaps 160(%rdi), %ymm1
5071 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5072 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
5073 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5074 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm0
5075 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5076 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5077 ; AVX-NEXT: vmovaps 80(%rdi), %xmm1
5078 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5079 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
5080 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5081 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5082 ; AVX-NEXT: vmovaps 352(%rdi), %ymm1
5083 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5084 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm0
5085 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5086 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5087 ; AVX-NEXT: vmovaps 272(%rdi), %xmm2
5088 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5089 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
5090 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5091 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5092 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5093 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5094 ; AVX-NEXT: vmovaps 544(%rdi), %ymm1
5095 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5096 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm0
5097 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5098 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5099 ; AVX-NEXT: vmovaps 464(%rdi), %xmm2
5100 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5101 ; AVX-NEXT: vmovaps 416(%rdi), %xmm1
5102 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5103 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5104 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5105 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5106 ; AVX-NEXT: vmovaps 736(%rdi), %ymm1
5107 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5108 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm0
5109 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5110 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5111 ; AVX-NEXT: vmovaps 656(%rdi), %xmm2
5112 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5113 ; AVX-NEXT: vmovaps 608(%rdi), %xmm1
5114 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5115 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5116 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5117 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5118 ; AVX-NEXT: vmovaps 928(%rdi), %ymm1
5119 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5120 ; AVX-NEXT: vinsertf128 $1, 896(%rdi), %ymm0, %ymm0
5121 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5122 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5123 ; AVX-NEXT: vmovaps 848(%rdi), %xmm14
5124 ; AVX-NEXT: vmovaps 800(%rdi), %xmm13
5125 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
5126 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5127 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5128 ; AVX-NEXT: vmovaps 1120(%rdi), %ymm12
5129 ; AVX-NEXT: vinsertf128 $1, 1088(%rdi), %ymm0, %ymm11
5130 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
5131 ; AVX-NEXT: vmovaps 1040(%rdi), %xmm10
5132 ; AVX-NEXT: vmovaps 992(%rdi), %xmm9
5133 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
5134 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5135 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5136 ; AVX-NEXT: vmovaps 1312(%rdi), %ymm8
5137 ; AVX-NEXT: vinsertf128 $1, 1280(%rdi), %ymm0, %ymm7
5138 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
5139 ; AVX-NEXT: vmovaps 1232(%rdi), %xmm6
5140 ; AVX-NEXT: vmovaps 1184(%rdi), %xmm5
5141 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
5142 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5143 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5144 ; AVX-NEXT: vmovaps 1504(%rdi), %ymm4
5145 ; AVX-NEXT: vinsertf128 $1, 1472(%rdi), %ymm0, %ymm3
5146 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
5147 ; AVX-NEXT: vmovaps 1424(%rdi), %xmm2
5148 ; AVX-NEXT: vmovaps 1376(%rdi), %xmm1
5149 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5150 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5151 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5152 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5153 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5154 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5155 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5156 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5157 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5158 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5159 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5160 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5161 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5162 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5163 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5164 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5165 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5166 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5167 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5168 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5169 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5170 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5171 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5172 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5173 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5174 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5175 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5176 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5177 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5178 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5179 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5180 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5181 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5182 ; AVX-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5183 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5184 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5185 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5186 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
5187 ; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
5188 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
5189 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
5190 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5191 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
5192 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
5193 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
5194 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
5195 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5196 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
5197 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5198 ; AVX-NEXT: vmovaps %ymm2, 192(%rsi)
5199 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5200 ; AVX-NEXT: vmovaps %ymm2, 128(%rsi)
5201 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5202 ; AVX-NEXT: vmovaps %ymm2, 64(%rsi)
5203 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5204 ; AVX-NEXT: vmovaps %ymm2, (%rsi)
5205 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5206 ; AVX-NEXT: vmovaps %ymm2, 224(%rsi)
5207 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5208 ; AVX-NEXT: vmovaps %ymm2, 160(%rsi)
5209 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5210 ; AVX-NEXT: vmovaps %ymm2, 96(%rsi)
5211 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5212 ; AVX-NEXT: vmovaps %ymm2, 32(%rsi)
5213 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5214 ; AVX-NEXT: vmovaps %ymm2, 192(%rdx)
5215 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5216 ; AVX-NEXT: vmovaps %ymm2, 128(%rdx)
5217 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5218 ; AVX-NEXT: vmovaps %ymm2, 64(%rdx)
5219 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5220 ; AVX-NEXT: vmovaps %ymm2, (%rdx)
5221 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5222 ; AVX-NEXT: vmovaps %ymm2, 224(%rdx)
5223 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5224 ; AVX-NEXT: vmovaps %ymm2, 160(%rdx)
5225 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5226 ; AVX-NEXT: vmovaps %ymm2, 96(%rdx)
5227 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5228 ; AVX-NEXT: vmovaps %ymm2, 32(%rdx)
5229 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5230 ; AVX-NEXT: vmovaps %ymm2, 192(%rcx)
5231 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5232 ; AVX-NEXT: vmovaps %ymm2, 128(%rcx)
5233 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5234 ; AVX-NEXT: vmovaps %ymm2, 64(%rcx)
5235 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5236 ; AVX-NEXT: vmovaps %ymm2, (%rcx)
5237 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5238 ; AVX-NEXT: vmovaps %ymm2, 224(%rcx)
5239 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5240 ; AVX-NEXT: vmovaps %ymm2, 160(%rcx)
5241 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5242 ; AVX-NEXT: vmovaps %ymm2, 96(%rcx)
5243 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5244 ; AVX-NEXT: vmovaps %ymm2, 32(%rcx)
5245 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5246 ; AVX-NEXT: vmovaps %ymm2, (%r8)
5247 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5248 ; AVX-NEXT: vmovaps %ymm2, 64(%r8)
5249 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5250 ; AVX-NEXT: vmovaps %ymm2, 128(%r8)
5251 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5252 ; AVX-NEXT: vmovaps %ymm2, 192(%r8)
5253 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5254 ; AVX-NEXT: vmovaps %ymm2, 224(%r8)
5255 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5256 ; AVX-NEXT: vmovaps %ymm2, 160(%r8)
5257 ; AVX-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
5258 ; AVX-NEXT: vmovaps %ymm2, 96(%r8)
5259 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5260 ; AVX-NEXT: vmovaps %ymm2, 32(%r8)
5261 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5262 ; AVX-NEXT: vmovaps %ymm2, 224(%r9)
5263 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5264 ; AVX-NEXT: vmovaps %ymm2, 192(%r9)
5265 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5266 ; AVX-NEXT: vmovaps %ymm2, 160(%r9)
5267 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5268 ; AVX-NEXT: vmovaps %ymm2, 128(%r9)
5269 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5270 ; AVX-NEXT: vmovaps %ymm2, 96(%r9)
5271 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5272 ; AVX-NEXT: vmovaps %ymm2, 64(%r9)
5273 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5274 ; AVX-NEXT: vmovaps %ymm2, 32(%r9)
5275 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5276 ; AVX-NEXT: vmovaps %ymm2, (%r9)
5277 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
5278 ; AVX-NEXT: vmovaps %ymm1, 224(%rax)
5279 ; AVX-NEXT: vmovaps %ymm0, 192(%rax)
5280 ; AVX-NEXT: vmovaps %ymm9, 160(%rax)
5281 ; AVX-NEXT: vmovaps %ymm13, 128(%rax)
5282 ; AVX-NEXT: vmovaps %ymm15, 96(%rax)
5283 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5284 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
5285 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5286 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
5287 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5288 ; AVX-NEXT: vmovaps %ymm0, (%rax)
5289 ; AVX-NEXT: addq $1624, %rsp # imm = 0x658
5290 ; AVX-NEXT: vzeroupper
5293 ; AVX2-LABEL: load_i64_stride6_vf32:
5295 ; AVX2-NEXT: subq $1496, %rsp # imm = 0x5D8
5296 ; AVX2-NEXT: vmovaps 1088(%rdi), %ymm2
5297 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5298 ; AVX2-NEXT: vmovaps 1056(%rdi), %ymm4
5299 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5300 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm3
5301 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5302 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm5
5303 ; AVX2-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
5304 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm1
5305 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5306 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm7
5307 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5308 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm0
5309 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5310 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm6
5311 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
5312 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
5313 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5314 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5315 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5316 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm1
5317 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5318 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
5319 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5320 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5321 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5322 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5323 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5324 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5325 ; AVX2-NEXT: vmovaps 1008(%rdi), %xmm1
5326 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5327 ; AVX2-NEXT: vmovaps 960(%rdi), %xmm0
5328 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5329 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5330 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
5331 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5332 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5333 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5334 ; AVX2-NEXT: vmovaps 1472(%rdi), %ymm0
5335 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5336 ; AVX2-NEXT: vmovaps 1440(%rdi), %ymm1
5337 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5338 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5339 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5340 ; AVX2-NEXT: vmovaps 1392(%rdi), %xmm1
5341 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5342 ; AVX2-NEXT: vmovaps 1344(%rdi), %xmm4
5343 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
5344 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5345 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5346 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm15
5347 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm0
5348 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5349 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
5350 ; AVX2-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5351 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5352 ; AVX2-NEXT: vmovaps (%rdi), %xmm8
5353 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm2
5354 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
5355 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5356 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5357 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm7
5358 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm0
5359 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5360 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5361 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5362 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5363 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm11
5364 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm12
5365 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
5366 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5367 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5368 ; AVX2-NEXT: vmovaps 896(%rdi), %ymm5
5369 ; AVX2-NEXT: vmovaps 864(%rdi), %ymm0
5370 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5371 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
5372 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5373 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5374 ; AVX2-NEXT: vmovaps 816(%rdi), %xmm13
5375 ; AVX2-NEXT: vmovaps 768(%rdi), %xmm10
5376 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
5377 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
5378 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5379 ; AVX2-NEXT: vmovaps 1280(%rdi), %ymm3
5380 ; AVX2-NEXT: vmovaps 1248(%rdi), %ymm0
5381 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5382 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5383 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5384 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5385 ; AVX2-NEXT: vmovaps 1200(%rdi), %xmm14
5386 ; AVX2-NEXT: vmovaps 1152(%rdi), %xmm1
5387 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
5388 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5389 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5390 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
5391 ; AVX2-NEXT: # xmm0 = xmm6[1],mem[1]
5392 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm6
5393 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5394 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5395 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5396 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5397 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5398 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5399 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5400 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm6
5401 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5402 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5403 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5404 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5405 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5406 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5407 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5408 ; AVX2-NEXT: vbroadcastsd 1064(%rdi), %ymm6
5409 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5410 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5411 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5412 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5413 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
5414 ; AVX2-NEXT: # xmm0 = xmm4[1],mem[1]
5415 ; AVX2-NEXT: vbroadcastsd 1448(%rdi), %ymm4
5416 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
5417 ; AVX2-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
5418 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
5419 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5420 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
5421 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm2
5422 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
5423 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5424 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5425 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
5426 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm2
5427 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
5428 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5429 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5430 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
5431 ; AVX2-NEXT: vbroadcastsd 872(%rdi), %ymm2
5432 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
5433 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5434 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5435 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
5436 ; AVX2-NEXT: vbroadcastsd 1256(%rdi), %ymm1
5437 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5438 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5439 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5440 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
5441 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5442 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5443 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm3
5444 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm5
5445 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
5446 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5447 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5448 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
5449 ; AVX2-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5450 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5451 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm6
5452 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm7
5453 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
5454 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5455 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5456 ; AVX2-NEXT: vbroadcastsd 1120(%rdi), %ymm0
5457 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5458 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5459 ; AVX2-NEXT: vmovaps 1024(%rdi), %xmm8
5460 ; AVX2-NEXT: vmovaps 976(%rdi), %xmm9
5461 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
5462 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5463 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5464 ; AVX2-NEXT: vbroadcastsd 1504(%rdi), %ymm0
5465 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5466 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5467 ; AVX2-NEXT: vmovaps 1408(%rdi), %xmm10
5468 ; AVX2-NEXT: vmovaps 1360(%rdi), %xmm11
5469 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
5470 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5471 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5472 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
5473 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5474 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5475 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm0
5476 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5477 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm2
5478 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5479 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
5480 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
5481 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5482 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm1
5483 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5484 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5485 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm0
5486 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5487 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm1
5488 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5489 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
5490 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
5491 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5492 ; AVX2-NEXT: vbroadcastsd 928(%rdi), %ymm12
5493 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5494 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
5495 ; AVX2-NEXT: vmovaps 832(%rdi), %xmm12
5496 ; AVX2-NEXT: vmovaps 784(%rdi), %xmm13
5497 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
5498 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
5499 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5500 ; AVX2-NEXT: vbroadcastsd 1312(%rdi), %ymm14
5501 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5502 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
5503 ; AVX2-NEXT: vmovaps 1216(%rdi), %xmm15
5504 ; AVX2-NEXT: vmovaps 1168(%rdi), %xmm0
5505 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
5506 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
5507 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5508 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
5509 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm14
5510 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5511 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm14[1],ymm3[3],ymm14[3]
5512 ; AVX2-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5513 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5514 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5515 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5516 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
5517 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm7
5518 ; AVX2-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
5519 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
5520 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5521 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5522 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5523 ; AVX2-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
5524 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
5525 ; AVX2-NEXT: vmovaps 1120(%rdi), %ymm6
5526 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5527 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm6[1],ymm3[3],ymm6[3]
5528 ; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5529 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5530 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5531 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5532 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
5533 ; AVX2-NEXT: vmovaps 1504(%rdi), %ymm5
5534 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5535 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5536 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
5537 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5538 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5539 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5540 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
5541 ; AVX2-NEXT: vmovaps 1312(%rdi), %ymm3
5542 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5543 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5544 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
5545 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5546 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5547 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
5548 ; AVX2-NEXT: vmovaps 928(%rdi), %ymm1
5549 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
5550 ; AVX2-NEXT: vmovaps %ymm1, %ymm4
5551 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5552 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
5553 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5554 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5555 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5556 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5557 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5558 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm2
5559 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5560 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5561 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5562 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
5563 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5564 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5565 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5566 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5567 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5568 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm9
5569 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5570 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
5571 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
5572 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5573 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5574 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5575 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
5576 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5577 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm5
5578 ; AVX2-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5579 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm1
5580 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5581 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
5582 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5583 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5584 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5585 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
5586 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5587 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm5
5588 ; AVX2-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5589 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
5590 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5591 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
5592 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5593 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5594 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5595 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
5596 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5597 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm14
5598 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm13
5599 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
5600 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5601 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5602 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5603 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5604 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5605 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm12
5606 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm11
5607 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
5608 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5609 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5610 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5611 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
5612 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5613 ; AVX2-NEXT: vmovaps 848(%rdi), %xmm10
5614 ; AVX2-NEXT: vmovaps 800(%rdi), %xmm7
5615 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
5616 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5617 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5618 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5619 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
5620 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5621 ; AVX2-NEXT: vmovaps 1040(%rdi), %xmm8
5622 ; AVX2-NEXT: vmovaps 992(%rdi), %xmm5
5623 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
5624 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5625 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5626 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5627 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5628 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5629 ; AVX2-NEXT: vmovaps 1232(%rdi), %xmm6
5630 ; AVX2-NEXT: vmovaps 1184(%rdi), %xmm3
5631 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
5632 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5633 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5634 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5635 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5636 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
5637 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5638 ; AVX2-NEXT: vmovaps 1424(%rdi), %xmm2
5639 ; AVX2-NEXT: vmovaps 1376(%rdi), %xmm1
5640 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5641 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5642 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5643 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
5644 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
5645 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5646 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
5647 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
5648 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5649 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5650 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm0
5651 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5652 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5653 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5654 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
5655 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
5656 ; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5657 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
5658 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5659 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5660 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
5661 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5662 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm0
5663 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5664 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5665 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
5666 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5667 ; AVX2-NEXT: vbroadcastsd 904(%rdi), %ymm0
5668 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5669 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5670 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
5671 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
5672 ; AVX2-NEXT: vbroadcastsd 1096(%rdi), %ymm0
5673 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5674 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5675 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
5676 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
5677 ; AVX2-NEXT: vbroadcastsd 1288(%rdi), %ymm0
5678 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5679 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5680 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
5681 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
5682 ; AVX2-NEXT: vbroadcastsd 1480(%rdi), %ymm0
5683 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
5684 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5685 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5686 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5687 ; AVX2-NEXT: vmovaps %ymm1, 192(%rsi)
5688 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5689 ; AVX2-NEXT: vmovaps %ymm1, 128(%rsi)
5690 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5691 ; AVX2-NEXT: vmovaps %ymm1, 64(%rsi)
5692 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5693 ; AVX2-NEXT: vmovaps %ymm1, (%rsi)
5694 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5695 ; AVX2-NEXT: vmovaps %ymm1, 224(%rsi)
5696 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5697 ; AVX2-NEXT: vmovaps %ymm1, 160(%rsi)
5698 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5699 ; AVX2-NEXT: vmovaps %ymm1, 96(%rsi)
5700 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5701 ; AVX2-NEXT: vmovaps %ymm1, 32(%rsi)
5702 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5703 ; AVX2-NEXT: vmovaps %ymm1, 192(%rdx)
5704 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5705 ; AVX2-NEXT: vmovaps %ymm1, 128(%rdx)
5706 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5707 ; AVX2-NEXT: vmovaps %ymm1, 64(%rdx)
5708 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5709 ; AVX2-NEXT: vmovaps %ymm1, (%rdx)
5710 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5711 ; AVX2-NEXT: vmovaps %ymm1, 224(%rdx)
5712 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5713 ; AVX2-NEXT: vmovaps %ymm1, 160(%rdx)
5714 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5715 ; AVX2-NEXT: vmovaps %ymm1, 96(%rdx)
5716 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5717 ; AVX2-NEXT: vmovaps %ymm1, 32(%rdx)
5718 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5719 ; AVX2-NEXT: vmovaps %ymm1, 192(%rcx)
5720 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5721 ; AVX2-NEXT: vmovaps %ymm1, 128(%rcx)
5722 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5723 ; AVX2-NEXT: vmovaps %ymm1, 64(%rcx)
5724 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5725 ; AVX2-NEXT: vmovaps %ymm1, (%rcx)
5726 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5727 ; AVX2-NEXT: vmovaps %ymm1, 224(%rcx)
5728 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5729 ; AVX2-NEXT: vmovaps %ymm1, 160(%rcx)
5730 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5731 ; AVX2-NEXT: vmovaps %ymm1, 96(%rcx)
5732 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5733 ; AVX2-NEXT: vmovaps %ymm1, 32(%rcx)
5734 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5735 ; AVX2-NEXT: vmovaps %ymm1, (%r8)
5736 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5737 ; AVX2-NEXT: vmovaps %ymm1, 64(%r8)
5738 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5739 ; AVX2-NEXT: vmovaps %ymm1, 128(%r8)
5740 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5741 ; AVX2-NEXT: vmovaps %ymm1, 192(%r8)
5742 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5743 ; AVX2-NEXT: vmovaps %ymm1, 224(%r8)
5744 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5745 ; AVX2-NEXT: vmovaps %ymm1, 160(%r8)
5746 ; AVX2-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5747 ; AVX2-NEXT: vmovaps %ymm1, 96(%r8)
5748 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5749 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
5750 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5751 ; AVX2-NEXT: vmovaps %ymm1, 224(%r9)
5752 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5753 ; AVX2-NEXT: vmovaps %ymm1, 192(%r9)
5754 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5755 ; AVX2-NEXT: vmovaps %ymm1, 160(%r9)
5756 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5757 ; AVX2-NEXT: vmovaps %ymm1, 128(%r9)
5758 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5759 ; AVX2-NEXT: vmovaps %ymm1, 96(%r9)
5760 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5761 ; AVX2-NEXT: vmovaps %ymm1, 64(%r9)
5762 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5763 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
5764 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5765 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
5766 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
5767 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
5768 ; AVX2-NEXT: vmovaps %ymm3, 192(%rax)
5769 ; AVX2-NEXT: vmovaps %ymm5, 160(%rax)
5770 ; AVX2-NEXT: vmovaps %ymm7, 128(%rax)
5771 ; AVX2-NEXT: vmovaps %ymm9, 96(%rax)
5772 ; AVX2-NEXT: vmovaps %ymm13, 64(%rax)
5773 ; AVX2-NEXT: vmovaps %ymm15, 32(%rax)
5774 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5775 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
5776 ; AVX2-NEXT: addq $1496, %rsp # imm = 0x5D8
5777 ; AVX2-NEXT: vzeroupper
5780 ; AVX2-FP-LABEL: load_i64_stride6_vf32:
5782 ; AVX2-FP-NEXT: subq $1496, %rsp # imm = 0x5D8
5783 ; AVX2-FP-NEXT: vmovaps 1088(%rdi), %ymm2
5784 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5785 ; AVX2-FP-NEXT: vmovaps 1056(%rdi), %ymm4
5786 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5787 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm3
5788 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5789 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm5
5790 ; AVX2-FP-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
5791 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm1
5792 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5793 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm7
5794 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5795 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm0
5796 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5797 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm6
5798 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
5799 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
5800 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5801 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5802 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5803 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm1
5804 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5805 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
5806 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5807 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5808 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5809 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5810 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5811 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5812 ; AVX2-FP-NEXT: vmovaps 1008(%rdi), %xmm1
5813 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5814 ; AVX2-FP-NEXT: vmovaps 960(%rdi), %xmm0
5815 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5816 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5817 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
5818 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5819 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5820 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5821 ; AVX2-FP-NEXT: vmovaps 1472(%rdi), %ymm0
5822 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5823 ; AVX2-FP-NEXT: vmovaps 1440(%rdi), %ymm1
5824 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5825 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5826 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5827 ; AVX2-FP-NEXT: vmovaps 1392(%rdi), %xmm1
5828 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5829 ; AVX2-FP-NEXT: vmovaps 1344(%rdi), %xmm4
5830 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
5831 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5832 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5833 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm15
5834 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm0
5835 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5836 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
5837 ; AVX2-FP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5838 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5839 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm8
5840 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm2
5841 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
5842 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5843 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5844 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm7
5845 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm0
5846 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5847 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5848 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5849 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5850 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm11
5851 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm12
5852 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
5853 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5854 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5855 ; AVX2-FP-NEXT: vmovaps 896(%rdi), %ymm5
5856 ; AVX2-FP-NEXT: vmovaps 864(%rdi), %ymm0
5857 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5858 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
5859 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5860 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5861 ; AVX2-FP-NEXT: vmovaps 816(%rdi), %xmm13
5862 ; AVX2-FP-NEXT: vmovaps 768(%rdi), %xmm10
5863 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
5864 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
5865 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5866 ; AVX2-FP-NEXT: vmovaps 1280(%rdi), %ymm3
5867 ; AVX2-FP-NEXT: vmovaps 1248(%rdi), %ymm0
5868 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5869 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5870 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5871 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5872 ; AVX2-FP-NEXT: vmovaps 1200(%rdi), %xmm14
5873 ; AVX2-FP-NEXT: vmovaps 1152(%rdi), %xmm1
5874 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
5875 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5876 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5877 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
5878 ; AVX2-FP-NEXT: # xmm0 = xmm6[1],mem[1]
5879 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm6
5880 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5881 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5882 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5883 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5884 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5885 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5886 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
5887 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm6
5888 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5889 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5890 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5891 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5892 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5893 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5894 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
5895 ; AVX2-FP-NEXT: vbroadcastsd 1064(%rdi), %ymm6
5896 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5897 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5898 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5899 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5900 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
5901 ; AVX2-FP-NEXT: # xmm0 = xmm4[1],mem[1]
5902 ; AVX2-FP-NEXT: vbroadcastsd 1448(%rdi), %ymm4
5903 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
5904 ; AVX2-FP-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
5905 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
5906 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5907 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
5908 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm2
5909 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
5910 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5911 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5912 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
5913 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm2
5914 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
5915 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5916 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5917 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
5918 ; AVX2-FP-NEXT: vbroadcastsd 872(%rdi), %ymm2
5919 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
5920 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5921 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5922 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
5923 ; AVX2-FP-NEXT: vbroadcastsd 1256(%rdi), %ymm1
5924 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5925 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5926 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5927 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
5928 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5929 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5930 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm3
5931 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm5
5932 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
5933 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5934 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5935 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
5936 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5937 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5938 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm6
5939 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm7
5940 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
5941 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5942 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5943 ; AVX2-FP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
5944 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5945 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5946 ; AVX2-FP-NEXT: vmovaps 1024(%rdi), %xmm8
5947 ; AVX2-FP-NEXT: vmovaps 976(%rdi), %xmm9
5948 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
5949 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5950 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5951 ; AVX2-FP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
5952 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5953 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5954 ; AVX2-FP-NEXT: vmovaps 1408(%rdi), %xmm10
5955 ; AVX2-FP-NEXT: vmovaps 1360(%rdi), %xmm11
5956 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
5957 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5958 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5959 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
5960 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5961 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5962 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm0
5963 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5964 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm2
5965 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5966 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
5967 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
5968 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5969 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm1
5970 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5971 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5972 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm0
5973 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5974 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm1
5975 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5976 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
5977 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
5978 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5979 ; AVX2-FP-NEXT: vbroadcastsd 928(%rdi), %ymm12
5980 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5981 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
5982 ; AVX2-FP-NEXT: vmovaps 832(%rdi), %xmm12
5983 ; AVX2-FP-NEXT: vmovaps 784(%rdi), %xmm13
5984 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
5985 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
5986 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5987 ; AVX2-FP-NEXT: vbroadcastsd 1312(%rdi), %ymm14
5988 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5989 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
5990 ; AVX2-FP-NEXT: vmovaps 1216(%rdi), %xmm15
5991 ; AVX2-FP-NEXT: vmovaps 1168(%rdi), %xmm0
5992 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
5993 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
5994 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5995 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
5996 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm14
5997 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5998 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm14[1],ymm3[3],ymm14[3]
5999 ; AVX2-FP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6000 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6001 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6002 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6003 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
6004 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm7
6005 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
6006 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
6007 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6008 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6009 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6010 ; AVX2-FP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
6011 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
6012 ; AVX2-FP-NEXT: vmovaps 1120(%rdi), %ymm6
6013 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6014 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm6[1],ymm3[3],ymm6[3]
6015 ; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6016 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6017 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6018 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6019 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
6020 ; AVX2-FP-NEXT: vmovaps 1504(%rdi), %ymm5
6021 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6022 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6023 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6024 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6025 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6026 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6027 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
6028 ; AVX2-FP-NEXT: vmovaps 1312(%rdi), %ymm3
6029 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6030 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6031 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6032 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6033 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6034 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
6035 ; AVX2-FP-NEXT: vmovaps 928(%rdi), %ymm1
6036 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
6037 ; AVX2-FP-NEXT: vmovaps %ymm1, %ymm4
6038 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6039 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6040 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6041 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6042 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6043 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6044 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
6045 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm2
6046 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6047 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
6048 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6049 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6050 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6051 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6052 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6053 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6054 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
6055 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm9
6056 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6057 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
6058 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6059 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6060 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6061 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6062 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
6063 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6064 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm5
6065 ; AVX2-FP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6066 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm1
6067 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6068 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
6069 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6070 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6071 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6072 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
6073 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6074 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm5
6075 ; AVX2-FP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6076 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
6077 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6078 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
6079 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6080 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6081 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6082 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
6083 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6084 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm14
6085 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm13
6086 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
6087 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6088 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6089 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6090 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6091 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6092 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm12
6093 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm11
6094 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
6095 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6096 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6097 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6098 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6099 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6100 ; AVX2-FP-NEXT: vmovaps 848(%rdi), %xmm10
6101 ; AVX2-FP-NEXT: vmovaps 800(%rdi), %xmm7
6102 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
6103 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6104 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6105 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6106 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
6107 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6108 ; AVX2-FP-NEXT: vmovaps 1040(%rdi), %xmm8
6109 ; AVX2-FP-NEXT: vmovaps 992(%rdi), %xmm5
6110 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
6111 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6112 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6113 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6114 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6115 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6116 ; AVX2-FP-NEXT: vmovaps 1232(%rdi), %xmm6
6117 ; AVX2-FP-NEXT: vmovaps 1184(%rdi), %xmm3
6118 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
6119 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6120 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6121 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6122 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6123 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6124 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6125 ; AVX2-FP-NEXT: vmovaps 1424(%rdi), %xmm2
6126 ; AVX2-FP-NEXT: vmovaps 1376(%rdi), %xmm1
6127 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
6128 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
6129 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6130 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
6131 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
6132 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6133 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6134 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
6135 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6136 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6137 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm0
6138 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6139 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6140 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6141 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6142 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
6143 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6144 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
6145 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6146 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6147 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
6148 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6149 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm0
6150 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6151 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6152 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
6153 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6154 ; AVX2-FP-NEXT: vbroadcastsd 904(%rdi), %ymm0
6155 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6156 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6157 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
6158 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
6159 ; AVX2-FP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
6160 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6161 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6162 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
6163 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
6164 ; AVX2-FP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
6165 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6166 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6167 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
6168 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
6169 ; AVX2-FP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
6170 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
6171 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
6172 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6173 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6174 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rsi)
6175 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6176 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rsi)
6177 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6178 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rsi)
6179 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6180 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rsi)
6181 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6182 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rsi)
6183 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6184 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rsi)
6185 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6186 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rsi)
6187 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6188 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rsi)
6189 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6190 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rdx)
6191 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6192 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rdx)
6193 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6194 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rdx)
6195 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6196 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rdx)
6197 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6198 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rdx)
6199 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6200 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rdx)
6201 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6202 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rdx)
6203 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6204 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rdx)
6205 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6206 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rcx)
6207 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6208 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rcx)
6209 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6210 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rcx)
6211 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6212 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rcx)
6213 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6214 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rcx)
6215 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6216 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rcx)
6217 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6218 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rcx)
6219 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6220 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rcx)
6221 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6222 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r8)
6223 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6224 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r8)
6225 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6226 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r8)
6227 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6228 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r8)
6229 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6230 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r8)
6231 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6232 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r8)
6233 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6234 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r8)
6235 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6236 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r8)
6237 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6238 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r9)
6239 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6240 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r9)
6241 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6242 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r9)
6243 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6244 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r9)
6245 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6246 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r9)
6247 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6248 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r9)
6249 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6250 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
6251 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6252 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
6253 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6254 ; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
6255 ; AVX2-FP-NEXT: vmovaps %ymm3, 192(%rax)
6256 ; AVX2-FP-NEXT: vmovaps %ymm5, 160(%rax)
6257 ; AVX2-FP-NEXT: vmovaps %ymm7, 128(%rax)
6258 ; AVX2-FP-NEXT: vmovaps %ymm9, 96(%rax)
6259 ; AVX2-FP-NEXT: vmovaps %ymm13, 64(%rax)
6260 ; AVX2-FP-NEXT: vmovaps %ymm15, 32(%rax)
6261 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6262 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
6263 ; AVX2-FP-NEXT: addq $1496, %rsp # imm = 0x5D8
6264 ; AVX2-FP-NEXT: vzeroupper
6265 ; AVX2-FP-NEXT: retq
6267 ; AVX2-FCP-LABEL: load_i64_stride6_vf32:
6268 ; AVX2-FCP: # %bb.0:
6269 ; AVX2-FCP-NEXT: subq $1496, %rsp # imm = 0x5D8
6270 ; AVX2-FCP-NEXT: vmovaps 1088(%rdi), %ymm2
6271 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6272 ; AVX2-FCP-NEXT: vmovaps 1056(%rdi), %ymm4
6273 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6274 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm3
6275 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6276 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm5
6277 ; AVX2-FCP-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
6278 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm1
6279 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6280 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm7
6281 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6282 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm0
6283 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6284 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm6
6285 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
6286 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
6287 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6288 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6289 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6290 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm1
6291 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6292 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
6293 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6294 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
6295 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
6296 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6297 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6298 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6299 ; AVX2-FCP-NEXT: vmovaps 1008(%rdi), %xmm1
6300 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6301 ; AVX2-FCP-NEXT: vmovaps 960(%rdi), %xmm0
6302 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6303 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
6304 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
6305 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6306 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6307 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6308 ; AVX2-FCP-NEXT: vmovaps 1472(%rdi), %ymm0
6309 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6310 ; AVX2-FCP-NEXT: vmovaps 1440(%rdi), %ymm1
6311 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6312 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6313 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6314 ; AVX2-FCP-NEXT: vmovaps 1392(%rdi), %xmm1
6315 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6316 ; AVX2-FCP-NEXT: vmovaps 1344(%rdi), %xmm4
6317 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
6318 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6319 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6320 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm15
6321 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm0
6322 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6323 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
6324 ; AVX2-FCP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6325 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6326 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm8
6327 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm2
6328 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
6329 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
6330 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6331 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm7
6332 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm0
6333 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6334 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6335 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6336 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6337 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm11
6338 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm12
6339 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
6340 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
6341 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6342 ; AVX2-FCP-NEXT: vmovaps 896(%rdi), %ymm5
6343 ; AVX2-FCP-NEXT: vmovaps 864(%rdi), %ymm0
6344 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6345 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
6346 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6347 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6348 ; AVX2-FCP-NEXT: vmovaps 816(%rdi), %xmm13
6349 ; AVX2-FCP-NEXT: vmovaps 768(%rdi), %xmm10
6350 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
6351 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
6352 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6353 ; AVX2-FCP-NEXT: vmovaps 1280(%rdi), %ymm3
6354 ; AVX2-FCP-NEXT: vmovaps 1248(%rdi), %ymm0
6355 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6356 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6357 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6358 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6359 ; AVX2-FCP-NEXT: vmovaps 1200(%rdi), %xmm14
6360 ; AVX2-FCP-NEXT: vmovaps 1152(%rdi), %xmm1
6361 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
6362 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6363 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6364 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
6365 ; AVX2-FCP-NEXT: # xmm0 = xmm6[1],mem[1]
6366 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm6
6367 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6368 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6369 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6370 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6371 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6372 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6373 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6374 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm6
6375 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6376 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6377 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6378 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6379 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6380 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6381 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6382 ; AVX2-FCP-NEXT: vbroadcastsd 1064(%rdi), %ymm6
6383 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6384 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6385 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6386 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6387 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
6388 ; AVX2-FCP-NEXT: # xmm0 = xmm4[1],mem[1]
6389 ; AVX2-FCP-NEXT: vbroadcastsd 1448(%rdi), %ymm4
6390 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
6391 ; AVX2-FCP-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
6392 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
6393 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6394 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
6395 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm2
6396 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
6397 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6398 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6399 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
6400 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm2
6401 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
6402 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6403 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6404 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
6405 ; AVX2-FCP-NEXT: vbroadcastsd 872(%rdi), %ymm2
6406 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
6407 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6408 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6409 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
6410 ; AVX2-FCP-NEXT: vbroadcastsd 1256(%rdi), %ymm1
6411 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6412 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6413 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6414 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
6415 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6416 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6417 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm3
6418 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm5
6419 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
6420 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6421 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6422 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
6423 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6424 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6425 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm6
6426 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm7
6427 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
6428 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6429 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6430 ; AVX2-FCP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
6431 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6432 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6433 ; AVX2-FCP-NEXT: vmovaps 1024(%rdi), %xmm8
6434 ; AVX2-FCP-NEXT: vmovaps 976(%rdi), %xmm9
6435 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
6436 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6437 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6438 ; AVX2-FCP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
6439 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6440 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6441 ; AVX2-FCP-NEXT: vmovaps 1408(%rdi), %xmm10
6442 ; AVX2-FCP-NEXT: vmovaps 1360(%rdi), %xmm11
6443 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
6444 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6445 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6446 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
6447 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6448 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6449 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm0
6450 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6451 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm2
6452 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6453 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
6454 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
6455 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6456 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm1
6457 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6458 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6459 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm0
6460 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6461 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm1
6462 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6463 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
6464 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
6465 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6466 ; AVX2-FCP-NEXT: vbroadcastsd 928(%rdi), %ymm12
6467 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6468 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
6469 ; AVX2-FCP-NEXT: vmovaps 832(%rdi), %xmm12
6470 ; AVX2-FCP-NEXT: vmovaps 784(%rdi), %xmm13
6471 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
6472 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
6473 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6474 ; AVX2-FCP-NEXT: vbroadcastsd 1312(%rdi), %ymm14
6475 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6476 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
6477 ; AVX2-FCP-NEXT: vmovaps 1216(%rdi), %xmm15
6478 ; AVX2-FCP-NEXT: vmovaps 1168(%rdi), %xmm0
6479 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
6480 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
6481 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6482 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
6483 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm14
6484 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6485 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm14[1],ymm3[3],ymm14[3]
6486 ; AVX2-FCP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6487 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6488 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6489 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6490 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
6491 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm7
6492 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
6493 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
6494 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6495 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6496 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6497 ; AVX2-FCP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
6498 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
6499 ; AVX2-FCP-NEXT: vmovaps 1120(%rdi), %ymm6
6500 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6501 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm6[1],ymm3[3],ymm6[3]
6502 ; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6503 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6504 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6505 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6506 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
6507 ; AVX2-FCP-NEXT: vmovaps 1504(%rdi), %ymm5
6508 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6509 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6510 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6511 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6512 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6513 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6514 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
6515 ; AVX2-FCP-NEXT: vmovaps 1312(%rdi), %ymm3
6516 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6517 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6518 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6519 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6520 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6521 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
6522 ; AVX2-FCP-NEXT: vmovaps 928(%rdi), %ymm1
6523 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
6524 ; AVX2-FCP-NEXT: vmovaps %ymm1, %ymm4
6525 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6526 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6527 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6528 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6529 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6530 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6531 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6532 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm2
6533 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6534 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
6535 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6536 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6537 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6538 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6539 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6540 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6541 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6542 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm9
6543 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6544 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
6545 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6546 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6547 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6548 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6549 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
6550 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6551 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm5
6552 ; AVX2-FCP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6553 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm1
6554 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6555 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
6556 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6557 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6558 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6559 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
6560 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6561 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm5
6562 ; AVX2-FCP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6563 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
6564 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6565 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
6566 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6567 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6568 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6569 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
6570 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6571 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm14
6572 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm13
6573 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
6574 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6575 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6576 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6577 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6578 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6579 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm12
6580 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm11
6581 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
6582 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6583 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6584 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6585 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6586 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6587 ; AVX2-FCP-NEXT: vmovaps 848(%rdi), %xmm10
6588 ; AVX2-FCP-NEXT: vmovaps 800(%rdi), %xmm7
6589 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
6590 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6591 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6592 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6593 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
6594 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6595 ; AVX2-FCP-NEXT: vmovaps 1040(%rdi), %xmm8
6596 ; AVX2-FCP-NEXT: vmovaps 992(%rdi), %xmm5
6597 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
6598 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6599 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6600 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6601 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6602 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6603 ; AVX2-FCP-NEXT: vmovaps 1232(%rdi), %xmm6
6604 ; AVX2-FCP-NEXT: vmovaps 1184(%rdi), %xmm3
6605 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
6606 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6607 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6608 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6609 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6610 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6611 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6612 ; AVX2-FCP-NEXT: vmovaps 1424(%rdi), %xmm2
6613 ; AVX2-FCP-NEXT: vmovaps 1376(%rdi), %xmm1
6614 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
6615 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
6616 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6617 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
6618 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
6619 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6620 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6621 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
6622 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6623 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6624 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm0
6625 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6626 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6627 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6628 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6629 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
6630 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6631 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
6632 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6633 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6634 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
6635 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6636 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm0
6637 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6638 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6639 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
6640 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6641 ; AVX2-FCP-NEXT: vbroadcastsd 904(%rdi), %ymm0
6642 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6643 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6644 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
6645 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
6646 ; AVX2-FCP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
6647 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6648 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6649 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
6650 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
6651 ; AVX2-FCP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
6652 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6653 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6654 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
6655 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
6656 ; AVX2-FCP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
6657 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
6658 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
6659 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6660 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6661 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rsi)
6662 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6663 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rsi)
6664 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6665 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rsi)
6666 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6667 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rsi)
6668 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6669 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rsi)
6670 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6671 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rsi)
6672 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6673 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rsi)
6674 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6675 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rsi)
6676 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6677 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rdx)
6678 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6679 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rdx)
6680 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6681 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rdx)
6682 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6683 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rdx)
6684 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6685 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rdx)
6686 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6687 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rdx)
6688 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6689 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rdx)
6690 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6691 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rdx)
6692 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6693 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rcx)
6694 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6695 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rcx)
6696 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6697 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rcx)
6698 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6699 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rcx)
6700 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6701 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rcx)
6702 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6703 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rcx)
6704 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6705 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rcx)
6706 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6707 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rcx)
6708 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6709 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r8)
6710 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6711 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r8)
6712 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6713 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r8)
6714 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6715 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r8)
6716 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6717 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r8)
6718 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6719 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r8)
6720 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6721 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r8)
6722 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6723 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r8)
6724 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6725 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r9)
6726 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6727 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r9)
6728 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6729 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r9)
6730 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6731 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r9)
6732 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6733 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r9)
6734 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6735 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r9)
6736 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6737 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
6738 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6739 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
6740 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6741 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
6742 ; AVX2-FCP-NEXT: vmovaps %ymm3, 192(%rax)
6743 ; AVX2-FCP-NEXT: vmovaps %ymm5, 160(%rax)
6744 ; AVX2-FCP-NEXT: vmovaps %ymm7, 128(%rax)
6745 ; AVX2-FCP-NEXT: vmovaps %ymm9, 96(%rax)
6746 ; AVX2-FCP-NEXT: vmovaps %ymm13, 64(%rax)
6747 ; AVX2-FCP-NEXT: vmovaps %ymm15, 32(%rax)
6748 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6749 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
6750 ; AVX2-FCP-NEXT: addq $1496, %rsp # imm = 0x5D8
6751 ; AVX2-FCP-NEXT: vzeroupper
6752 ; AVX2-FCP-NEXT: retq
6754 ; AVX512-LABEL: load_i64_stride6_vf32:
6756 ; AVX512-NEXT: subq $2632, %rsp # imm = 0xA48
6757 ; AVX512-NEXT: vmovdqa64 1280(%rdi), %zmm2
6758 ; AVX512-NEXT: vmovdqa64 1344(%rdi), %zmm21
6759 ; AVX512-NEXT: vmovdqa64 896(%rdi), %zmm1
6760 ; AVX512-NEXT: vmovdqa64 960(%rdi), %zmm19
6761 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm18
6762 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm3
6763 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm4
6764 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm0
6765 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm5
6766 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm25
6767 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
6768 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
6769 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm7
6770 ; AVX512-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
6771 ; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6772 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
6773 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
6774 ; AVX512-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
6775 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6776 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm8
6777 ; AVX512-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
6778 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6779 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm8
6780 ; AVX512-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
6781 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6782 ; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
6783 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6784 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
6785 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
6786 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm8
6787 ; AVX512-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
6788 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6789 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6790 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm6
6791 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
6792 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
6793 ; AVX512-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
6794 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6795 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm1
6796 ; AVX512-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
6797 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6798 ; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
6799 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6800 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
6801 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6802 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6803 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
6804 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6805 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm8
6806 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
6807 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6808 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm8
6809 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
6810 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6811 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
6812 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6813 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
6814 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6815 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6816 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
6817 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6818 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm8
6819 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
6820 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6821 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm8
6822 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
6823 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6824 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
6825 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6826 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
6827 ; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
6828 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm1
6829 ; AVX512-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
6830 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6831 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
6832 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6833 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
6834 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6835 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
6836 ; AVX512-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
6837 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6838 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
6839 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6840 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm0
6841 ; AVX512-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
6842 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6843 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
6844 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6845 ; AVX512-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
6846 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6847 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm0
6848 ; AVX512-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
6849 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6850 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
6851 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6852 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
6853 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0
6854 ; AVX512-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
6855 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6856 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
6857 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0
6858 ; AVX512-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
6859 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6860 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
6861 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6862 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm0
6863 ; AVX512-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
6864 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6865 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
6866 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6867 ; AVX512-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
6868 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6869 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm25
6870 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
6871 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm29
6872 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm2
6873 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
6874 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm3
6875 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
6876 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6877 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
6878 ; AVX512-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
6879 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6880 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
6881 ; AVX512-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
6882 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6883 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm3
6884 ; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
6885 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6886 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
6887 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm9
6888 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm27
6889 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm1
6890 ; AVX512-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
6891 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6892 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm30
6893 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm26
6894 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm1
6895 ; AVX512-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
6896 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6897 ; AVX512-NEXT: vmovdqa64 1088(%rdi), %zmm7
6898 ; AVX512-NEXT: vmovdqa64 1024(%rdi), %zmm20
6899 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm1
6900 ; AVX512-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
6901 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6902 ; AVX512-NEXT: vmovdqa64 832(%rdi), %zmm1
6903 ; AVX512-NEXT: vmovdqa64 768(%rdi), %zmm8
6904 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm15
6905 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
6906 ; AVX512-NEXT: vmovdqa64 1472(%rdi), %zmm6
6907 ; AVX512-NEXT: vmovdqa64 1408(%rdi), %zmm21
6908 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm3
6909 ; AVX512-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
6910 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6911 ; AVX512-NEXT: vmovdqa64 1216(%rdi), %zmm19
6912 ; AVX512-NEXT: vmovdqa64 1152(%rdi), %zmm4
6913 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
6914 ; AVX512-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
6915 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm12
6916 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
6917 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
6918 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6919 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm13
6920 ; AVX512-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
6921 ; AVX512-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
6922 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm14
6923 ; AVX512-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
6924 ; AVX512-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
6925 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm2
6926 ; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
6927 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6928 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
6929 ; AVX512-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
6930 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
6931 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm19
6932 ; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
6933 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm22
6934 ; AVX512-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
6935 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm17
6936 ; AVX512-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
6937 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm18
6938 ; AVX512-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
6939 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
6940 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6941 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm24
6942 ; AVX512-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
6943 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
6944 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
6945 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm0
6946 ; AVX512-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
6947 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6948 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
6949 ; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
6950 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm0
6951 ; AVX512-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
6952 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6953 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
6954 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6955 ; AVX512-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
6956 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm26
6957 ; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
6958 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm23
6959 ; AVX512-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
6960 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm28
6961 ; AVX512-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
6962 ; AVX512-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
6963 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm27
6964 ; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
6965 ; AVX512-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
6966 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm10
6967 ; AVX512-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
6968 ; AVX512-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
6969 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm16
6970 ; AVX512-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
6971 ; AVX512-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
6972 ; AVX512-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
6973 ; AVX512-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
6974 ; AVX512-NEXT: movb $56, %al
6975 ; AVX512-NEXT: kmovw %eax, %k1
6976 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6977 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
6978 ; AVX512-NEXT: movb $-64, %al
6979 ; AVX512-NEXT: kmovw %eax, %k2
6980 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6981 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
6982 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6983 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
6984 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
6985 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
6986 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
6987 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6988 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6989 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
6990 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6991 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
6992 ; AVX512-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
6993 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
6994 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
6995 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
6996 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
6997 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm20
6998 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6999 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7000 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7001 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7002 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7003 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7004 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7005 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7006 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7007 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7008 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7009 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7010 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7011 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7012 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7013 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm17
7014 ; AVX512-NEXT: movb $24, %al
7015 ; AVX512-NEXT: kmovw %eax, %k2
7016 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7017 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7018 ; AVX512-NEXT: movb $-32, %al
7019 ; AVX512-NEXT: kmovw %eax, %k1
7020 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7021 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7022 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7023 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7024 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7025 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm22
7026 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7027 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7028 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7029 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7030 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm24
7031 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7032 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7033 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7034 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7035 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7036 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7037 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7038 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7039 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7040 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7041 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7042 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm18
7043 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7044 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7045 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7046 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7047 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm23
7048 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7049 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7050 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7051 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7052 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7053 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7054 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7055 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7056 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7057 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7058 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7059 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7060 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7061 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7062 ; AVX512-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7063 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7064 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7065 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7066 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7067 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7068 ; AVX512-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7069 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7070 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7071 ; AVX512-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7072 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7073 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7074 ; AVX512-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7075 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7076 ; AVX512-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7077 ; AVX512-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7078 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7079 ; AVX512-NEXT: vmovaps %zmm7, 64(%rsi)
7080 ; AVX512-NEXT: vmovdqa64 %zmm29, (%rsi)
7081 ; AVX512-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7082 ; AVX512-NEXT: vmovdqa64 %zmm19, (%rdx)
7083 ; AVX512-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7084 ; AVX512-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7085 ; AVX512-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7086 ; AVX512-NEXT: vmovdqa64 %zmm24, (%rcx)
7087 ; AVX512-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7088 ; AVX512-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7089 ; AVX512-NEXT: vmovdqa64 %zmm31, 192(%r8)
7090 ; AVX512-NEXT: vmovdqa64 %zmm23, (%r8)
7091 ; AVX512-NEXT: vmovdqa64 %zmm18, 64(%r8)
7092 ; AVX512-NEXT: vmovdqa64 %zmm14, 128(%r8)
7093 ; AVX512-NEXT: vmovdqa64 %zmm5, 192(%r9)
7094 ; AVX512-NEXT: vmovdqa64 %zmm2, (%r9)
7095 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%r9)
7096 ; AVX512-NEXT: vmovdqa64 %zmm0, 128(%r9)
7097 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
7098 ; AVX512-NEXT: vmovdqa64 %zmm6, 128(%rax)
7099 ; AVX512-NEXT: vmovdqa64 %zmm4, 192(%rax)
7100 ; AVX512-NEXT: vmovdqa64 %zmm10, (%rax)
7101 ; AVX512-NEXT: vmovdqa64 %zmm3, 64(%rax)
7102 ; AVX512-NEXT: addq $2632, %rsp # imm = 0xA48
7103 ; AVX512-NEXT: vzeroupper
7106 ; AVX512-FCP-LABEL: load_i64_stride6_vf32:
7107 ; AVX512-FCP: # %bb.0:
7108 ; AVX512-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
7109 ; AVX512-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
7110 ; AVX512-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
7111 ; AVX512-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
7112 ; AVX512-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
7113 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
7114 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
7115 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
7116 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
7117 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
7118 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
7119 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7120 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7121 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
7122 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7123 ; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7124 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7125 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7126 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7127 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7128 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
7129 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7130 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7131 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7132 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7133 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7134 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7135 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7136 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7137 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7138 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7139 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7140 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7141 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7142 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
7143 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7144 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
7145 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7146 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7147 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
7148 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7149 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7150 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7151 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7152 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7153 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7154 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7155 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7156 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7157 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7158 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7159 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7160 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7161 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7162 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7163 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7164 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7165 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7166 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7167 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7168 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7169 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7170 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7171 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7172 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7173 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7174 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7175 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7176 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7177 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7178 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7179 ; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7180 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
7181 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7182 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7183 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7184 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7185 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7186 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7187 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7188 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7189 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7190 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7191 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7192 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
7193 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7194 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7195 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7196 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7197 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7198 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7199 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7200 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7201 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7202 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7203 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7204 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7205 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7206 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7207 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7208 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7209 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7210 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7211 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7212 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7213 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7214 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7215 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7216 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7217 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7218 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7219 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7220 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7221 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
7222 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
7223 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
7224 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
7225 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7226 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7227 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7228 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7229 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7230 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7231 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7232 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7233 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7234 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7235 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7236 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7237 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7238 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7239 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
7240 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
7241 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
7242 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7243 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7244 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
7245 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
7246 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
7247 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7248 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7249 ; AVX512-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
7250 ; AVX512-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
7251 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
7252 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7253 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7254 ; AVX512-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
7255 ; AVX512-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
7256 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
7257 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7258 ; AVX512-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
7259 ; AVX512-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
7260 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
7261 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7262 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7263 ; AVX512-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
7264 ; AVX512-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
7265 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7266 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7267 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
7268 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7269 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7270 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7271 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
7272 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7273 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7274 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
7275 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7276 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7277 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
7278 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7279 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7280 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
7281 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
7282 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
7283 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
7284 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
7285 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
7286 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
7287 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
7288 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
7289 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
7290 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
7291 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
7292 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7293 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
7294 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
7295 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
7296 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7297 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
7298 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
7299 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7300 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
7301 ; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7302 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
7303 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
7304 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7305 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
7306 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
7307 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
7308 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
7309 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
7310 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
7311 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
7312 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
7313 ; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
7314 ; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
7315 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
7316 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
7317 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
7318 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
7319 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
7320 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
7321 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
7322 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
7323 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
7324 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
7325 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
7326 ; AVX512-FCP-NEXT: movb $56, %al
7327 ; AVX512-FCP-NEXT: kmovw %eax, %k1
7328 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7329 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
7330 ; AVX512-FCP-NEXT: movb $-64, %al
7331 ; AVX512-FCP-NEXT: kmovw %eax, %k2
7332 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7333 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7334 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7335 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7336 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7337 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7338 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7339 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7340 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7341 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
7342 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7343 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
7344 ; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7345 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7346 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7347 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7348 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7349 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
7350 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7351 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7352 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7353 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7354 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7355 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7356 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7357 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7358 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7359 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7360 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7361 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7362 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7363 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7364 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7365 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
7366 ; AVX512-FCP-NEXT: movb $24, %al
7367 ; AVX512-FCP-NEXT: kmovw %eax, %k2
7368 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7369 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7370 ; AVX512-FCP-NEXT: movb $-32, %al
7371 ; AVX512-FCP-NEXT: kmovw %eax, %k1
7372 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7373 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7374 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7375 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7376 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7377 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
7378 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7379 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7380 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7381 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7382 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
7383 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7384 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7385 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7386 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7387 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7388 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7389 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7390 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7391 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7392 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7393 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7394 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
7395 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7396 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7397 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7398 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7399 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
7400 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7401 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7402 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7403 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7404 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7405 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7406 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7407 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7408 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7410 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7411 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7412 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7413 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7414 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7415 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7416 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7417 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7418 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7419 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7420 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7421 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7422 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7423 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7424 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7425 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7426 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7427 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7428 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7429 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7430 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7431 ; AVX512-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
7432 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
7433 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7434 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
7435 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7436 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7437 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7438 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
7439 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7440 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7441 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
7442 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
7443 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
7444 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
7445 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
7446 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
7447 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
7448 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
7449 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
7450 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
7451 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
7452 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
7453 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
7454 ; AVX512-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
7455 ; AVX512-FCP-NEXT: vzeroupper
7456 ; AVX512-FCP-NEXT: retq
7458 ; AVX512DQ-LABEL: load_i64_stride6_vf32:
7459 ; AVX512DQ: # %bb.0:
7460 ; AVX512DQ-NEXT: subq $2632, %rsp # imm = 0xA48
7461 ; AVX512DQ-NEXT: vmovdqa64 1280(%rdi), %zmm2
7462 ; AVX512DQ-NEXT: vmovdqa64 1344(%rdi), %zmm21
7463 ; AVX512DQ-NEXT: vmovdqa64 896(%rdi), %zmm1
7464 ; AVX512DQ-NEXT: vmovdqa64 960(%rdi), %zmm19
7465 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm18
7466 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm3
7467 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm4
7468 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm0
7469 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm5
7470 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm25
7471 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7472 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7473 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm7
7474 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7475 ; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7476 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7477 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
7478 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7479 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7480 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm8
7481 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7482 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7483 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm8
7484 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7485 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7486 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7487 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7488 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7489 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7490 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm8
7491 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7492 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7493 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7494 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm6
7495 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7496 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
7497 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7498 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7499 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm1
7500 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7501 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7502 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7503 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7504 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7505 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7506 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7507 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7508 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7509 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm8
7510 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7511 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7512 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm8
7513 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7514 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7515 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7516 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7517 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7518 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7519 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7520 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7521 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7522 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm8
7523 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7524 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7525 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm8
7526 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7527 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7528 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7529 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7530 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7531 ; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7532 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm1
7533 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7534 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7535 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7536 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7537 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7538 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7539 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
7540 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7541 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7542 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7543 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7544 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm0
7545 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7546 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7547 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7548 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7549 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7550 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7551 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm0
7552 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7553 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7554 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7555 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7556 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7557 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0
7558 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7559 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7560 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7561 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0
7562 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7563 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7564 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7565 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7566 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm0
7567 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7568 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7569 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7570 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7571 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7572 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7573 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm25
7574 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
7575 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm29
7576 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm2
7577 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7578 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm3
7579 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7580 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7581 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
7582 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7583 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7584 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
7585 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7586 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7587 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm3
7588 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7589 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7590 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7591 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm9
7592 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm27
7593 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm1
7594 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7595 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7596 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm30
7597 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm26
7598 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm1
7599 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7600 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7601 ; AVX512DQ-NEXT: vmovdqa64 1088(%rdi), %zmm7
7602 ; AVX512DQ-NEXT: vmovdqa64 1024(%rdi), %zmm20
7603 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm1
7604 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7605 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7606 ; AVX512DQ-NEXT: vmovdqa64 832(%rdi), %zmm1
7607 ; AVX512DQ-NEXT: vmovdqa64 768(%rdi), %zmm8
7608 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm15
7609 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7610 ; AVX512DQ-NEXT: vmovdqa64 1472(%rdi), %zmm6
7611 ; AVX512DQ-NEXT: vmovdqa64 1408(%rdi), %zmm21
7612 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm3
7613 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7614 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7615 ; AVX512DQ-NEXT: vmovdqa64 1216(%rdi), %zmm19
7616 ; AVX512DQ-NEXT: vmovdqa64 1152(%rdi), %zmm4
7617 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7618 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7619 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm12
7620 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7621 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7622 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7623 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm13
7624 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7625 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7626 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm14
7627 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7628 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7629 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm2
7630 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7631 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7632 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
7633 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
7634 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
7635 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm19
7636 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
7637 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm22
7638 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
7639 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm17
7640 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
7641 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm18
7642 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
7643 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
7644 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7645 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm24
7646 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
7647 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
7648 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7649 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm0
7650 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
7651 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7652 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
7653 ; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7654 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm0
7655 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
7656 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7657 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
7658 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
7659 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
7660 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm26
7661 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
7662 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm23
7663 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
7664 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm28
7665 ; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
7666 ; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
7667 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm27
7668 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
7669 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
7670 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm10
7671 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
7672 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
7673 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm16
7674 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
7675 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
7676 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
7677 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
7678 ; AVX512DQ-NEXT: movb $56, %al
7679 ; AVX512DQ-NEXT: kmovw %eax, %k1
7680 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7681 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
7682 ; AVX512DQ-NEXT: movb $-64, %al
7683 ; AVX512DQ-NEXT: kmovw %eax, %k2
7684 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7685 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7686 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7687 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7688 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7689 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7690 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7691 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7692 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7693 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
7694 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7695 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
7696 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7697 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7698 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7699 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7700 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7701 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm20
7702 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7703 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7704 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7705 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7706 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7707 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7708 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7709 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7710 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7711 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7712 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7713 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7714 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7715 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7716 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7717 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm17
7718 ; AVX512DQ-NEXT: movb $24, %al
7719 ; AVX512DQ-NEXT: kmovw %eax, %k2
7720 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7721 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7722 ; AVX512DQ-NEXT: movb $-32, %al
7723 ; AVX512DQ-NEXT: kmovw %eax, %k1
7724 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7725 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7726 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7727 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7728 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7729 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm22
7730 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7731 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7732 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7733 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7734 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm24
7735 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7736 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7737 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7738 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7739 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7740 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7741 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7742 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7743 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7744 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7745 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7746 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm18
7747 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7748 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7749 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7750 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7751 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm23
7752 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7753 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7754 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7755 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7756 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7757 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7758 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7759 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7760 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7761 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7762 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7763 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7764 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7765 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7766 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7767 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7768 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7769 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7770 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7771 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7772 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7773 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7774 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7775 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7776 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7777 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7778 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7779 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7780 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7781 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7782 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7783 ; AVX512DQ-NEXT: vmovaps %zmm7, 64(%rsi)
7784 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, (%rsi)
7785 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7786 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, (%rdx)
7787 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7788 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7789 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7790 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, (%rcx)
7791 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7792 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7793 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 192(%r8)
7794 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, (%r8)
7795 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 64(%r8)
7796 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 128(%r8)
7797 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 192(%r9)
7798 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, (%r9)
7799 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%r9)
7800 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%r9)
7801 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
7802 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rax)
7803 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 192(%rax)
7804 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, (%rax)
7805 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 64(%rax)
7806 ; AVX512DQ-NEXT: addq $2632, %rsp # imm = 0xA48
7807 ; AVX512DQ-NEXT: vzeroupper
7808 ; AVX512DQ-NEXT: retq
7810 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf32:
7811 ; AVX512DQ-FCP: # %bb.0:
7812 ; AVX512DQ-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
7813 ; AVX512DQ-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
7814 ; AVX512DQ-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
7815 ; AVX512DQ-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
7816 ; AVX512DQ-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
7817 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
7818 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
7819 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
7820 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
7821 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
7822 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
7823 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7824 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7825 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
7826 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7827 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7828 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7829 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7830 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7831 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7832 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
7833 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7834 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7835 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7836 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7837 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7838 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7839 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7840 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7841 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7842 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7843 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7844 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7845 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7846 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
7847 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7848 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
7849 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7850 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7851 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
7852 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7853 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7854 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7855 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7856 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7857 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7858 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7859 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7860 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7861 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7862 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7863 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7864 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7865 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7866 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7867 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7868 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7869 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7870 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7871 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7872 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7873 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7874 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7875 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7876 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7877 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7878 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7879 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7880 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7881 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7882 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7883 ; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7884 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
7885 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7886 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7887 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7888 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7889 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7890 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7891 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7892 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7893 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7894 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7895 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7896 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
7897 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7898 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7899 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7900 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7901 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7902 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7903 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7904 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7905 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7906 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7907 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7908 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7909 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7910 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7911 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7912 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7913 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7914 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7915 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7916 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7917 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7918 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7919 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7920 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7921 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7922 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7923 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7924 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7925 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
7926 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
7927 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
7928 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
7929 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7930 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7931 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7932 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7933 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7934 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7935 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7936 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7937 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7938 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7939 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7940 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7941 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7942 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7943 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
7944 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
7945 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
7946 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7947 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7948 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
7949 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
7950 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
7951 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7952 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7953 ; AVX512DQ-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
7954 ; AVX512DQ-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
7955 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
7956 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7957 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7958 ; AVX512DQ-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
7959 ; AVX512DQ-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
7960 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
7961 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7962 ; AVX512DQ-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
7963 ; AVX512DQ-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
7964 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
7965 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7966 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7967 ; AVX512DQ-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
7968 ; AVX512DQ-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
7969 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7970 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7971 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
7972 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7973 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7974 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7975 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
7976 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7977 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7978 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
7979 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7980 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7981 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
7982 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7983 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7984 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
7985 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
7986 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
7987 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
7988 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
7989 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
7990 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
7991 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
7992 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
7993 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
7994 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
7995 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
7996 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7997 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
7998 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
7999 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8000 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8001 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
8002 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8003 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8004 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8005 ; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8006 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
8007 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8008 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8009 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8010 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8011 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8012 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
8013 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8014 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
8015 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8016 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
8017 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8018 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8019 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
8020 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8021 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8022 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
8023 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8024 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8025 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
8026 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8027 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8028 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8029 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8030 ; AVX512DQ-FCP-NEXT: movb $56, %al
8031 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
8032 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8033 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8034 ; AVX512DQ-FCP-NEXT: movb $-64, %al
8035 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
8036 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8037 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8038 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8039 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8040 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8041 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8042 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8043 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8044 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8045 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8046 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8047 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8048 ; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8049 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8050 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8051 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8052 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8053 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
8054 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8055 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8056 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8057 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8058 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8059 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8060 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8061 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8062 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8063 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8064 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8065 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8066 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8067 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8068 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8069 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
8070 ; AVX512DQ-FCP-NEXT: movb $24, %al
8071 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
8072 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8073 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8074 ; AVX512DQ-FCP-NEXT: movb $-32, %al
8075 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
8076 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8077 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8078 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8079 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8080 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8081 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
8082 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8083 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8084 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8085 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8086 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
8087 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8088 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8089 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8090 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8091 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8092 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8093 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8094 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8095 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8096 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8097 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8098 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
8099 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8100 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8101 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8102 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8103 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
8104 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8105 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8106 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8107 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8108 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8109 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8110 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8111 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8112 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8113 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8114 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8115 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8116 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8117 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8118 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8119 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8120 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8121 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8122 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8123 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8124 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8125 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8126 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8127 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8128 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8129 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8130 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8131 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8132 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8133 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8134 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8135 ; AVX512DQ-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
8136 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
8137 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8138 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
8139 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8140 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8141 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8142 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
8143 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8144 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8145 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
8146 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
8147 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
8148 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
8149 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
8150 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
8151 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
8152 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
8153 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
8154 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
8155 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
8156 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
8157 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
8158 ; AVX512DQ-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
8159 ; AVX512DQ-FCP-NEXT: vzeroupper
8160 ; AVX512DQ-FCP-NEXT: retq
8162 ; AVX512BW-LABEL: load_i64_stride6_vf32:
8163 ; AVX512BW: # %bb.0:
8164 ; AVX512BW-NEXT: subq $2632, %rsp # imm = 0xA48
8165 ; AVX512BW-NEXT: vmovdqa64 1280(%rdi), %zmm2
8166 ; AVX512BW-NEXT: vmovdqa64 1344(%rdi), %zmm21
8167 ; AVX512BW-NEXT: vmovdqa64 896(%rdi), %zmm1
8168 ; AVX512BW-NEXT: vmovdqa64 960(%rdi), %zmm19
8169 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm18
8170 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm3
8171 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm4
8172 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm0
8173 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm5
8174 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm25
8175 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8176 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8177 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm7
8178 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8179 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8180 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8181 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
8182 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8183 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8184 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm8
8185 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8186 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8187 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm8
8188 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8189 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8190 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8191 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8192 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8193 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8194 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm8
8195 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8196 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8197 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8198 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm6
8199 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8200 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
8201 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8202 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8203 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm1
8204 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8205 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8206 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8207 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8208 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8209 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8210 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8211 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8212 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8213 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
8214 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8215 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8216 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm8
8217 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8218 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8219 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8220 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8221 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8222 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8223 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8224 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8225 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8226 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
8227 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8228 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8229 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm8
8230 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8231 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8232 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8233 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8234 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8235 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8236 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
8237 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8238 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8239 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8240 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8241 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8242 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8243 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
8244 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8245 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8246 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8247 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8248 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm0
8249 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8250 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8251 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8252 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8253 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8254 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8255 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm0
8256 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8257 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8258 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8259 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8260 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8261 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0
8262 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8263 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8264 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8265 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0
8266 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8267 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8268 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8269 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8270 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm0
8271 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8272 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8273 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8274 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8275 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8276 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8277 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm25
8278 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
8279 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm29
8280 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2
8281 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
8282 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm3
8283 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
8284 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8285 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
8286 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
8287 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8288 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
8289 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
8290 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8291 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm3
8292 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
8293 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8294 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
8295 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm9
8296 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm27
8297 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm1
8298 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
8299 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8300 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm30
8301 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm26
8302 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm1
8303 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
8304 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8305 ; AVX512BW-NEXT: vmovdqa64 1088(%rdi), %zmm7
8306 ; AVX512BW-NEXT: vmovdqa64 1024(%rdi), %zmm20
8307 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm1
8308 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
8309 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8310 ; AVX512BW-NEXT: vmovdqa64 832(%rdi), %zmm1
8311 ; AVX512BW-NEXT: vmovdqa64 768(%rdi), %zmm8
8312 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm15
8313 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
8314 ; AVX512BW-NEXT: vmovdqa64 1472(%rdi), %zmm6
8315 ; AVX512BW-NEXT: vmovdqa64 1408(%rdi), %zmm21
8316 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm3
8317 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
8318 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8319 ; AVX512BW-NEXT: vmovdqa64 1216(%rdi), %zmm19
8320 ; AVX512BW-NEXT: vmovdqa64 1152(%rdi), %zmm4
8321 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
8322 ; AVX512BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
8323 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm12
8324 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
8325 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
8326 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8327 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
8328 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
8329 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
8330 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm14
8331 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
8332 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
8333 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm2
8334 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
8335 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8336 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
8337 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
8338 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
8339 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm19
8340 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
8341 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm22
8342 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
8343 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm17
8344 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
8345 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm18
8346 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
8347 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
8348 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8349 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm24
8350 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
8351 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8352 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8353 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm0
8354 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8355 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8356 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8357 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8358 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
8359 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8360 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8361 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8362 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8363 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8364 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm26
8365 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8366 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm23
8367 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8368 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm28
8369 ; AVX512BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8370 ; AVX512BW-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8371 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm27
8372 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8373 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8374 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm10
8375 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8376 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8377 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm16
8378 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8379 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8380 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8381 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8382 ; AVX512BW-NEXT: movb $56, %al
8383 ; AVX512BW-NEXT: kmovd %eax, %k1
8384 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8385 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8386 ; AVX512BW-NEXT: movb $-64, %al
8387 ; AVX512BW-NEXT: kmovd %eax, %k2
8388 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8389 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8390 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8391 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8392 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8393 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8394 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8395 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8396 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8397 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8398 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8399 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8400 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8401 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8402 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8403 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8404 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8405 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm20
8406 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8407 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8408 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8409 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8410 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8411 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8412 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8413 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8414 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8415 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8416 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8417 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8418 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8419 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8420 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8421 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm17
8422 ; AVX512BW-NEXT: movb $24, %al
8423 ; AVX512BW-NEXT: kmovd %eax, %k2
8424 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8425 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8426 ; AVX512BW-NEXT: movb $-32, %al
8427 ; AVX512BW-NEXT: kmovd %eax, %k1
8428 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8429 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8430 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8431 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8432 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8433 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm22
8434 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8435 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8436 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8437 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8438 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm24
8439 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8440 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8441 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8442 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8443 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8444 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8445 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8446 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8447 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8448 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8449 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8450 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm18
8451 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8452 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8453 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8454 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8455 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm23
8456 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8457 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8458 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8459 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8460 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8461 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8462 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8463 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8464 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8465 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8466 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8467 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8468 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8469 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8470 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8471 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8472 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8473 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8474 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8475 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8476 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8477 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8478 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8479 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8480 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8481 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8482 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8483 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8484 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8485 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8486 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8487 ; AVX512BW-NEXT: vmovaps %zmm7, 64(%rsi)
8488 ; AVX512BW-NEXT: vmovdqa64 %zmm29, (%rsi)
8489 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8490 ; AVX512BW-NEXT: vmovdqa64 %zmm19, (%rdx)
8491 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8492 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8493 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8494 ; AVX512BW-NEXT: vmovdqa64 %zmm24, (%rcx)
8495 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8496 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8497 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 192(%r8)
8498 ; AVX512BW-NEXT: vmovdqa64 %zmm23, (%r8)
8499 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
8500 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 128(%r8)
8501 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 192(%r9)
8502 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%r9)
8503 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
8504 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%r9)
8505 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8506 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
8507 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 192(%rax)
8508 ; AVX512BW-NEXT: vmovdqa64 %zmm10, (%rax)
8509 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 64(%rax)
8510 ; AVX512BW-NEXT: addq $2632, %rsp # imm = 0xA48
8511 ; AVX512BW-NEXT: vzeroupper
8512 ; AVX512BW-NEXT: retq
8514 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf32:
8515 ; AVX512BW-FCP: # %bb.0:
8516 ; AVX512BW-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
8517 ; AVX512BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
8518 ; AVX512BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
8519 ; AVX512BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
8520 ; AVX512BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
8521 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
8522 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
8523 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
8524 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
8525 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
8526 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
8527 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8528 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8529 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
8530 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8531 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8532 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8533 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
8534 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8535 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8536 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
8537 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8538 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8539 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
8540 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8541 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8542 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8543 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8544 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8545 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8546 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
8547 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8548 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8549 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8550 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
8551 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8552 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
8553 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8554 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8555 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
8556 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8557 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8558 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8559 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8560 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8561 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8562 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8563 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8564 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8565 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
8566 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8567 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8568 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
8569 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8570 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8571 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8572 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8573 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8574 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8575 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8576 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8577 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8578 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
8579 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8580 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8581 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
8582 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8583 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8584 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8585 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8586 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8587 ; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8588 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
8589 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8590 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8591 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8592 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8593 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8594 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8595 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
8596 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8597 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8598 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8599 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8600 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
8601 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8602 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8603 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8604 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8605 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8606 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8607 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
8608 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8609 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8610 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8611 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8612 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8613 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
8614 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8615 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8616 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8617 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
8618 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8619 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8620 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8621 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8622 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
8623 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8624 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8625 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8626 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8627 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8628 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8629 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
8630 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
8631 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
8632 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
8633 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
8634 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
8635 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
8636 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8637 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
8638 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
8639 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8640 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
8641 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
8642 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8643 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
8644 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
8645 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8646 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
8647 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
8648 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
8649 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
8650 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
8651 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8652 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
8653 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
8654 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
8655 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
8656 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8657 ; AVX512BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
8658 ; AVX512BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
8659 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
8660 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
8661 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8662 ; AVX512BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
8663 ; AVX512BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
8664 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
8665 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
8666 ; AVX512BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
8667 ; AVX512BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
8668 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
8669 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
8670 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8671 ; AVX512BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
8672 ; AVX512BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
8673 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
8674 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
8675 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
8676 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
8677 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
8678 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8679 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
8680 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
8681 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
8682 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
8683 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
8684 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
8685 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
8686 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
8687 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8688 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
8689 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
8690 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
8691 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
8692 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
8693 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
8694 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
8695 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
8696 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
8697 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
8698 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
8699 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
8700 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8701 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
8702 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
8703 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8704 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8705 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
8706 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8707 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8708 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8709 ; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8710 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
8711 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8712 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8713 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8714 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8715 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8716 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
8717 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8718 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
8719 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8720 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
8721 ; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8722 ; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8723 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
8724 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8725 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8726 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
8727 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8728 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8729 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
8730 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8731 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8732 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8733 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8734 ; AVX512BW-FCP-NEXT: movb $56, %al
8735 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
8736 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8737 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8738 ; AVX512BW-FCP-NEXT: movb $-64, %al
8739 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
8740 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8741 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8742 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8743 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8744 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8745 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8746 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8747 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8748 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8749 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8750 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8751 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8752 ; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8753 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8754 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8755 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8756 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8757 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
8758 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8759 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8760 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8761 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8762 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8763 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8764 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8765 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8766 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8767 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8768 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8769 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8770 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8771 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8772 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8773 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
8774 ; AVX512BW-FCP-NEXT: movb $24, %al
8775 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
8776 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8777 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8778 ; AVX512BW-FCP-NEXT: movb $-32, %al
8779 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
8780 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8781 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8782 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8783 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8784 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8785 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
8786 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8787 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8788 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8789 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8790 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
8791 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8792 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8793 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8794 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8795 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8796 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8797 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8798 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8799 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8800 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8801 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8802 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
8803 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8804 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8805 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8806 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8807 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
8808 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8809 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8810 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8811 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8812 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8813 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8814 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8815 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8816 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8817 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8818 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8819 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8820 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8821 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8822 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8823 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8824 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8825 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8826 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8827 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8828 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8829 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8830 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8831 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8832 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8833 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8834 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8835 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8836 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8837 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8838 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8839 ; AVX512BW-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
8840 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
8841 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8842 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
8843 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8844 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8845 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8846 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
8847 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8848 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8849 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
8850 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
8851 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
8852 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
8853 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
8854 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
8855 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
8856 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
8857 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
8858 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
8859 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
8860 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
8861 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
8862 ; AVX512BW-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
8863 ; AVX512BW-FCP-NEXT: vzeroupper
8864 ; AVX512BW-FCP-NEXT: retq
8866 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf32:
8867 ; AVX512DQ-BW: # %bb.0:
8868 ; AVX512DQ-BW-NEXT: subq $2632, %rsp # imm = 0xA48
8869 ; AVX512DQ-BW-NEXT: vmovdqa64 1280(%rdi), %zmm2
8870 ; AVX512DQ-BW-NEXT: vmovdqa64 1344(%rdi), %zmm21
8871 ; AVX512DQ-BW-NEXT: vmovdqa64 896(%rdi), %zmm1
8872 ; AVX512DQ-BW-NEXT: vmovdqa64 960(%rdi), %zmm19
8873 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm18
8874 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm3
8875 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm4
8876 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm0
8877 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm5
8878 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm25
8879 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8880 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8881 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm7
8882 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8883 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8884 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8885 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
8886 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8887 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8888 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm8
8889 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8890 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8891 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm8
8892 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8893 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8894 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8895 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8896 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8897 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8898 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm8
8899 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8900 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8901 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8902 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm6
8903 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8904 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
8905 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8906 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8907 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm1
8908 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8909 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8910 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8911 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8912 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8913 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8914 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8915 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8916 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8917 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm8
8918 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8919 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8920 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm8
8921 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8922 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8923 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8924 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8925 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8926 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8928 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8929 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8930 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm8
8931 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8932 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8933 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm8
8934 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8935 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8936 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8937 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8938 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8939 ; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8940 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm1
8941 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8942 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8943 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8944 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8945 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8946 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8947 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
8948 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8949 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8950 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8951 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8952 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm0
8953 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8954 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8955 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8956 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8957 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8958 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8959 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm0
8960 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8961 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8962 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8963 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8964 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8965 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0
8966 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8967 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8968 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8969 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0
8970 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8971 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8972 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8973 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8974 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm0
8975 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8976 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8977 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8978 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8979 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8980 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8981 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm25
8982 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
8983 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm29
8984 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm2
8985 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
8986 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm3
8987 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
8988 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8989 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
8990 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
8991 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8992 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
8993 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
8994 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8995 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm3
8996 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
8997 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8998 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
8999 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm9
9000 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm27
9001 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm1
9002 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
9003 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9004 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm30
9005 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm26
9006 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm1
9007 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
9008 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9009 ; AVX512DQ-BW-NEXT: vmovdqa64 1088(%rdi), %zmm7
9010 ; AVX512DQ-BW-NEXT: vmovdqa64 1024(%rdi), %zmm20
9011 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm1
9012 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
9013 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9014 ; AVX512DQ-BW-NEXT: vmovdqa64 832(%rdi), %zmm1
9015 ; AVX512DQ-BW-NEXT: vmovdqa64 768(%rdi), %zmm8
9016 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm15
9017 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
9018 ; AVX512DQ-BW-NEXT: vmovdqa64 1472(%rdi), %zmm6
9019 ; AVX512DQ-BW-NEXT: vmovdqa64 1408(%rdi), %zmm21
9020 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm3
9021 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
9022 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9023 ; AVX512DQ-BW-NEXT: vmovdqa64 1216(%rdi), %zmm19
9024 ; AVX512DQ-BW-NEXT: vmovdqa64 1152(%rdi), %zmm4
9025 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
9026 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
9027 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm12
9028 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
9029 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
9030 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9031 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm13
9032 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
9033 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
9034 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm14
9035 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
9036 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
9037 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm2
9038 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
9039 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9040 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
9041 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
9042 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
9043 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm19
9044 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
9045 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm22
9046 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
9047 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm17
9048 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
9049 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm18
9050 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
9051 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
9052 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9053 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm24
9054 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
9055 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
9056 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
9057 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm0
9058 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
9059 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9060 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
9061 ; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
9062 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm0
9063 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
9064 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9065 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
9066 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
9067 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
9068 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm26
9069 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
9070 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm23
9071 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
9072 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm28
9073 ; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
9074 ; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
9075 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm27
9076 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
9077 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
9078 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm10
9079 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
9080 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
9081 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm16
9082 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
9083 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
9084 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
9085 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
9086 ; AVX512DQ-BW-NEXT: movb $56, %al
9087 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
9088 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9089 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
9090 ; AVX512DQ-BW-NEXT: movb $-64, %al
9091 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
9092 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9093 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
9094 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9095 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9096 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9097 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9098 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9099 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9100 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9101 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
9102 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9103 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
9104 ; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
9105 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9106 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9107 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9108 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9109 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm20
9110 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9111 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
9112 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
9113 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9114 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
9115 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
9116 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
9117 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9118 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
9119 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
9120 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
9121 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9122 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
9123 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
9124 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
9125 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm17
9126 ; AVX512DQ-BW-NEXT: movb $24, %al
9127 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
9128 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9129 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
9130 ; AVX512DQ-BW-NEXT: movb $-32, %al
9131 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
9132 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
9133 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9134 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9135 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9136 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
9137 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm22
9138 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9139 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9140 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9141 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
9142 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm24
9143 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9144 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
9145 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
9146 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9147 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
9148 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
9149 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9150 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9151 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9152 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9153 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
9154 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm18
9155 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9156 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9157 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9158 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
9159 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm23
9160 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9161 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
9162 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
9163 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9164 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
9165 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
9166 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9167 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
9168 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9169 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
9170 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9171 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
9172 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
9173 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9174 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
9175 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
9176 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9177 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
9178 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
9179 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9180 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
9181 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
9182 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9183 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
9184 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
9185 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9186 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
9187 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
9188 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 192(%rsi)
9189 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 128(%rsi)
9190 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
9191 ; AVX512DQ-BW-NEXT: vmovaps %zmm7, 64(%rsi)
9192 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, (%rsi)
9193 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 192(%rdx)
9194 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, (%rdx)
9195 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, 64(%rdx)
9196 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 128(%rdx)
9197 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 192(%rcx)
9198 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, (%rcx)
9199 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, 64(%rcx)
9200 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 128(%rcx)
9201 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 192(%r8)
9202 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, (%r8)
9203 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
9204 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 128(%r8)
9205 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, 192(%r9)
9206 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%r9)
9207 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
9208 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 128(%r9)
9209 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9210 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
9211 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 192(%rax)
9212 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, (%rax)
9213 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 64(%rax)
9214 ; AVX512DQ-BW-NEXT: addq $2632, %rsp # imm = 0xA48
9215 ; AVX512DQ-BW-NEXT: vzeroupper
9216 ; AVX512DQ-BW-NEXT: retq
9218 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf32:
9219 ; AVX512DQ-BW-FCP: # %bb.0:
9220 ; AVX512DQ-BW-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
9221 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
9222 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
9223 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
9224 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
9225 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
9226 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
9227 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
9228 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
9229 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
9230 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
9231 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
9232 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
9233 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
9234 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
9235 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9236 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
9237 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
9238 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
9239 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9240 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
9241 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
9242 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9243 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
9244 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
9245 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9246 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
9247 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9248 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
9249 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
9250 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
9251 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
9252 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9253 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9254 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
9255 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
9256 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
9257 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
9258 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9259 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
9260 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
9261 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9262 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
9263 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9264 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
9265 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9266 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9267 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
9268 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9269 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
9270 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
9271 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9272 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
9273 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
9274 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9275 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
9276 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9277 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
9278 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9279 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9280 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
9281 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9282 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
9283 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
9284 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9285 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
9286 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
9287 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9288 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
9289 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9290 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
9291 ; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
9292 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
9293 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
9294 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9295 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
9296 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9297 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
9298 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9299 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
9300 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
9301 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9302 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
9303 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9304 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
9305 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
9306 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9307 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
9308 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9309 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
9310 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9311 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
9312 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
9313 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9314 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
9315 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9316 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
9317 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
9318 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
9319 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9320 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
9321 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
9322 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
9323 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9324 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
9325 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9326 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
9327 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
9328 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9329 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
9330 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9331 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
9332 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9333 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
9334 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
9335 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
9336 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
9337 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
9338 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
9339 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
9340 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9341 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
9342 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
9343 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9344 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
9345 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
9346 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9347 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
9348 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
9349 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9350 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
9351 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
9352 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
9353 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
9354 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
9355 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9356 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
9357 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
9358 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
9359 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
9360 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9361 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
9362 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
9363 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
9364 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
9365 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9366 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
9367 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
9368 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
9369 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
9370 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
9371 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
9372 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
9373 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
9374 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9375 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
9376 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
9377 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
9378 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
9379 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
9380 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
9381 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
9382 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9383 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
9384 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
9385 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
9386 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
9387 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
9388 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
9389 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
9390 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
9391 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9392 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
9393 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
9394 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
9395 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
9396 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
9397 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
9398 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
9399 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
9400 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
9401 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
9402 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
9403 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
9404 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9405 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
9406 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
9407 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
9408 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
9409 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
9410 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
9411 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9412 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
9413 ; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
9414 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
9415 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
9416 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9417 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
9418 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
9419 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
9420 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
9421 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
9422 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
9423 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
9424 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
9425 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
9426 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
9427 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
9428 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
9429 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
9430 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
9431 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
9432 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
9433 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
9434 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
9435 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
9436 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
9437 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
9438 ; AVX512DQ-BW-FCP-NEXT: movb $56, %al
9439 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
9440 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9441 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
9442 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %al
9443 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
9444 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9445 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
9446 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9447 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9448 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9449 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9450 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9451 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9452 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9453 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
9454 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9455 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
9456 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
9457 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9458 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9459 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9460 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9461 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
9462 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9463 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
9464 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
9465 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9466 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
9467 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
9468 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
9469 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9470 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
9471 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
9472 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
9473 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9474 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
9475 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
9476 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
9477 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
9478 ; AVX512DQ-BW-FCP-NEXT: movb $24, %al
9479 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
9480 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9481 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
9482 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %al
9483 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
9484 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
9485 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9486 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9487 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9488 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
9489 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
9490 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9491 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9492 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9493 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
9494 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
9495 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9496 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
9497 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
9498 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9499 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
9500 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
9501 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9502 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9503 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9504 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9505 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
9506 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
9507 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9508 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9509 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9510 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
9511 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
9512 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9513 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
9514 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
9515 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9516 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
9517 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
9518 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9519 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
9520 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9521 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
9522 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9523 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
9524 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
9525 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9526 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
9527 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
9528 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9529 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
9530 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
9531 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9532 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
9533 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
9534 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9535 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
9536 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
9537 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9538 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
9539 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
9540 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
9541 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
9542 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
9543 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
9544 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
9545 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
9546 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
9547 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
9548 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
9549 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
9550 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
9551 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
9552 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
9553 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
9554 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
9555 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
9556 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
9557 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
9558 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
9559 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
9560 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
9561 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
9562 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
9563 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
9564 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
9565 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
9566 ; AVX512DQ-BW-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
9567 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
9568 ; AVX512DQ-BW-FCP-NEXT: retq
9569 %wide.vec = load <192 x i64>, ptr %in.vec, align 64
9570 %strided.vec0 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90, i32 96, i32 102, i32 108, i32 114, i32 120, i32 126, i32 132, i32 138, i32 144, i32 150, i32 156, i32 162, i32 168, i32 174, i32 180, i32 186>
9571 %strided.vec1 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91, i32 97, i32 103, i32 109, i32 115, i32 121, i32 127, i32 133, i32 139, i32 145, i32 151, i32 157, i32 163, i32 169, i32 175, i32 181, i32 187>
9572 %strided.vec2 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92, i32 98, i32 104, i32 110, i32 116, i32 122, i32 128, i32 134, i32 140, i32 146, i32 152, i32 158, i32 164, i32 170, i32 176, i32 182, i32 188>
9573 %strided.vec3 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93, i32 99, i32 105, i32 111, i32 117, i32 123, i32 129, i32 135, i32 141, i32 147, i32 153, i32 159, i32 165, i32 171, i32 177, i32 183, i32 189>
9574 %strided.vec4 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94, i32 100, i32 106, i32 112, i32 118, i32 124, i32 130, i32 136, i32 142, i32 148, i32 154, i32 160, i32 166, i32 172, i32 178, i32 184, i32 190>
9575 %strided.vec5 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95, i32 101, i32 107, i32 113, i32 119, i32 125, i32 131, i32 137, i32 143, i32 149, i32 155, i32 161, i32 167, i32 173, i32 179, i32 185, i32 191>
9576 store <32 x i64> %strided.vec0, ptr %out.vec0, align 64
9577 store <32 x i64> %strided.vec1, ptr %out.vec1, align 64
9578 store <32 x i64> %strided.vec2, ptr %out.vec2, align 64
9579 store <32 x i64> %strided.vec3, ptr %out.vec3, align 64
9580 store <32 x i64> %strided.vec4, ptr %out.vec4, align 64
9581 store <32 x i64> %strided.vec5, ptr %out.vec5, align 64
9585 define void @load_i64_stride6_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
9586 ; SSE-LABEL: load_i64_stride6_vf64:
9588 ; SSE-NEXT: subq $2712, %rsp # imm = 0xA98
9589 ; SSE-NEXT: movaps 816(%rdi), %xmm0
9590 ; SSE-NEXT: movaps 720(%rdi), %xmm1
9591 ; SSE-NEXT: movaps 672(%rdi), %xmm8
9592 ; SSE-NEXT: movaps 624(%rdi), %xmm2
9593 ; SSE-NEXT: movaps 576(%rdi), %xmm9
9594 ; SSE-NEXT: movaps 528(%rdi), %xmm3
9595 ; SSE-NEXT: movaps 480(%rdi), %xmm10
9596 ; SSE-NEXT: movaps 432(%rdi), %xmm4
9597 ; SSE-NEXT: movaps 384(%rdi), %xmm11
9598 ; SSE-NEXT: movaps 336(%rdi), %xmm5
9599 ; SSE-NEXT: movaps 288(%rdi), %xmm12
9600 ; SSE-NEXT: movaps 240(%rdi), %xmm6
9601 ; SSE-NEXT: movaps 192(%rdi), %xmm13
9602 ; SSE-NEXT: movaps 144(%rdi), %xmm7
9603 ; SSE-NEXT: movaps 96(%rdi), %xmm14
9604 ; SSE-NEXT: movaps %xmm14, %xmm15
9605 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm7[0]
9606 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9607 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm7[1]
9608 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9609 ; SSE-NEXT: movaps %xmm13, %xmm7
9610 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm6[0]
9611 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9612 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
9613 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9614 ; SSE-NEXT: movaps %xmm12, %xmm6
9615 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
9616 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9617 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm5[1]
9618 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9619 ; SSE-NEXT: movaps %xmm11, %xmm5
9620 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
9621 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9622 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm4[1]
9623 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9624 ; SSE-NEXT: movaps %xmm10, %xmm4
9625 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
9626 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9627 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm3[1]
9628 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9629 ; SSE-NEXT: movaps %xmm9, %xmm3
9630 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
9631 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9632 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm2[1]
9633 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9634 ; SSE-NEXT: movaps %xmm8, %xmm2
9635 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
9636 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9637 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
9638 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9639 ; SSE-NEXT: movaps 768(%rdi), %xmm1
9640 ; SSE-NEXT: movaps %xmm1, %xmm2
9641 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9642 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9643 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9644 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9645 ; SSE-NEXT: movaps 912(%rdi), %xmm0
9646 ; SSE-NEXT: movaps 864(%rdi), %xmm1
9647 ; SSE-NEXT: movaps %xmm1, %xmm2
9648 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9649 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9650 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9651 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9652 ; SSE-NEXT: movaps 1008(%rdi), %xmm0
9653 ; SSE-NEXT: movaps 960(%rdi), %xmm1
9654 ; SSE-NEXT: movaps %xmm1, %xmm2
9655 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9656 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9657 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9658 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9659 ; SSE-NEXT: movaps 1104(%rdi), %xmm0
9660 ; SSE-NEXT: movaps 1056(%rdi), %xmm1
9661 ; SSE-NEXT: movaps %xmm1, %xmm2
9662 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9663 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9664 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9665 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9666 ; SSE-NEXT: movaps 1200(%rdi), %xmm0
9667 ; SSE-NEXT: movaps 1152(%rdi), %xmm1
9668 ; SSE-NEXT: movaps %xmm1, %xmm2
9669 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9670 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9671 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9672 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9673 ; SSE-NEXT: movaps 1296(%rdi), %xmm0
9674 ; SSE-NEXT: movaps 1248(%rdi), %xmm1
9675 ; SSE-NEXT: movaps %xmm1, %xmm2
9676 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9677 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9678 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9679 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9680 ; SSE-NEXT: movaps 1392(%rdi), %xmm0
9681 ; SSE-NEXT: movaps 1344(%rdi), %xmm1
9682 ; SSE-NEXT: movaps %xmm1, %xmm2
9683 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9684 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9685 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9686 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9687 ; SSE-NEXT: movaps 1488(%rdi), %xmm0
9688 ; SSE-NEXT: movaps 1440(%rdi), %xmm1
9689 ; SSE-NEXT: movaps %xmm1, %xmm2
9690 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9691 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9692 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9693 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9694 ; SSE-NEXT: movaps 1584(%rdi), %xmm0
9695 ; SSE-NEXT: movaps 1536(%rdi), %xmm1
9696 ; SSE-NEXT: movaps %xmm1, %xmm2
9697 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9698 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9699 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9700 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9701 ; SSE-NEXT: movaps 1680(%rdi), %xmm0
9702 ; SSE-NEXT: movaps 1632(%rdi), %xmm1
9703 ; SSE-NEXT: movaps %xmm1, %xmm2
9704 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9705 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9706 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9707 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9708 ; SSE-NEXT: movaps 1776(%rdi), %xmm0
9709 ; SSE-NEXT: movaps 1728(%rdi), %xmm1
9710 ; SSE-NEXT: movaps %xmm1, %xmm2
9711 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9712 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9713 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9714 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9715 ; SSE-NEXT: movaps 1872(%rdi), %xmm0
9716 ; SSE-NEXT: movaps 1824(%rdi), %xmm1
9717 ; SSE-NEXT: movaps %xmm1, %xmm2
9718 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9719 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9720 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9721 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9722 ; SSE-NEXT: movaps 1968(%rdi), %xmm0
9723 ; SSE-NEXT: movaps 1920(%rdi), %xmm1
9724 ; SSE-NEXT: movaps %xmm1, %xmm2
9725 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9726 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9727 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9728 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9729 ; SSE-NEXT: movaps 2064(%rdi), %xmm0
9730 ; SSE-NEXT: movaps 2016(%rdi), %xmm1
9731 ; SSE-NEXT: movaps %xmm1, %xmm2
9732 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9733 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9734 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9735 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9736 ; SSE-NEXT: movaps 2160(%rdi), %xmm0
9737 ; SSE-NEXT: movaps 2112(%rdi), %xmm1
9738 ; SSE-NEXT: movaps %xmm1, %xmm2
9739 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9740 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9741 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9742 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9743 ; SSE-NEXT: movaps 2256(%rdi), %xmm0
9744 ; SSE-NEXT: movaps 2208(%rdi), %xmm1
9745 ; SSE-NEXT: movaps %xmm1, %xmm2
9746 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9747 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9748 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9749 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9750 ; SSE-NEXT: movaps 2352(%rdi), %xmm0
9751 ; SSE-NEXT: movaps 2304(%rdi), %xmm1
9752 ; SSE-NEXT: movaps %xmm1, %xmm2
9753 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9754 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9755 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9756 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9757 ; SSE-NEXT: movaps 2448(%rdi), %xmm0
9758 ; SSE-NEXT: movaps 2400(%rdi), %xmm1
9759 ; SSE-NEXT: movaps %xmm1, %xmm2
9760 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9761 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9762 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9763 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9764 ; SSE-NEXT: movaps 2544(%rdi), %xmm0
9765 ; SSE-NEXT: movaps 2496(%rdi), %xmm1
9766 ; SSE-NEXT: movaps %xmm1, %xmm2
9767 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9768 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9769 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9770 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9771 ; SSE-NEXT: movaps 2640(%rdi), %xmm0
9772 ; SSE-NEXT: movaps 2592(%rdi), %xmm1
9773 ; SSE-NEXT: movaps %xmm1, %xmm2
9774 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9775 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9776 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9777 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9778 ; SSE-NEXT: movaps 2736(%rdi), %xmm0
9779 ; SSE-NEXT: movaps 2688(%rdi), %xmm1
9780 ; SSE-NEXT: movaps %xmm1, %xmm2
9781 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9782 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9783 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9784 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9785 ; SSE-NEXT: movaps 2832(%rdi), %xmm0
9786 ; SSE-NEXT: movaps 2784(%rdi), %xmm1
9787 ; SSE-NEXT: movaps %xmm1, %xmm2
9788 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9789 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9790 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9791 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9792 ; SSE-NEXT: movaps 2928(%rdi), %xmm0
9793 ; SSE-NEXT: movaps 2880(%rdi), %xmm1
9794 ; SSE-NEXT: movaps %xmm1, %xmm2
9795 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9796 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9797 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9798 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9799 ; SSE-NEXT: movaps 3024(%rdi), %xmm0
9800 ; SSE-NEXT: movaps 2976(%rdi), %xmm1
9801 ; SSE-NEXT: movaps %xmm1, %xmm2
9802 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9803 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9804 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9805 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9806 ; SSE-NEXT: movaps (%rdi), %xmm1
9807 ; SSE-NEXT: movaps 48(%rdi), %xmm0
9808 ; SSE-NEXT: movaps %xmm1, %xmm2
9809 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9810 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9811 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9812 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9813 ; SSE-NEXT: movaps 64(%rdi), %xmm0
9814 ; SSE-NEXT: movaps 16(%rdi), %xmm1
9815 ; SSE-NEXT: movaps %xmm1, %xmm2
9816 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9817 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9818 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9819 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9820 ; SSE-NEXT: movaps 160(%rdi), %xmm0
9821 ; SSE-NEXT: movaps 112(%rdi), %xmm1
9822 ; SSE-NEXT: movaps %xmm1, %xmm2
9823 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9824 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9825 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9826 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9827 ; SSE-NEXT: movaps 256(%rdi), %xmm0
9828 ; SSE-NEXT: movaps 208(%rdi), %xmm1
9829 ; SSE-NEXT: movaps %xmm1, %xmm2
9830 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9831 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9832 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9833 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9834 ; SSE-NEXT: movaps 352(%rdi), %xmm0
9835 ; SSE-NEXT: movaps 304(%rdi), %xmm1
9836 ; SSE-NEXT: movaps %xmm1, %xmm2
9837 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9838 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9839 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9840 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9841 ; SSE-NEXT: movaps 448(%rdi), %xmm0
9842 ; SSE-NEXT: movaps 400(%rdi), %xmm1
9843 ; SSE-NEXT: movaps %xmm1, %xmm2
9844 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9845 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9846 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9847 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9848 ; SSE-NEXT: movaps 544(%rdi), %xmm0
9849 ; SSE-NEXT: movaps 496(%rdi), %xmm1
9850 ; SSE-NEXT: movaps %xmm1, %xmm2
9851 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9852 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9853 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9854 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9855 ; SSE-NEXT: movaps 640(%rdi), %xmm0
9856 ; SSE-NEXT: movaps 592(%rdi), %xmm1
9857 ; SSE-NEXT: movaps %xmm1, %xmm2
9858 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9859 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9860 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9861 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9862 ; SSE-NEXT: movaps 736(%rdi), %xmm0
9863 ; SSE-NEXT: movaps 688(%rdi), %xmm1
9864 ; SSE-NEXT: movaps %xmm1, %xmm2
9865 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9866 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9867 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9868 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9869 ; SSE-NEXT: movaps 832(%rdi), %xmm0
9870 ; SSE-NEXT: movaps 784(%rdi), %xmm1
9871 ; SSE-NEXT: movaps %xmm1, %xmm2
9872 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9873 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9874 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9875 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9876 ; SSE-NEXT: movaps 928(%rdi), %xmm0
9877 ; SSE-NEXT: movaps 880(%rdi), %xmm1
9878 ; SSE-NEXT: movaps %xmm1, %xmm2
9879 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9880 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9881 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9882 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9883 ; SSE-NEXT: movaps 1024(%rdi), %xmm0
9884 ; SSE-NEXT: movaps 976(%rdi), %xmm1
9885 ; SSE-NEXT: movaps %xmm1, %xmm2
9886 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9887 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9888 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9889 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9890 ; SSE-NEXT: movaps 1120(%rdi), %xmm0
9891 ; SSE-NEXT: movaps 1072(%rdi), %xmm1
9892 ; SSE-NEXT: movaps %xmm1, %xmm2
9893 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9894 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9895 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9896 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9897 ; SSE-NEXT: movaps 1216(%rdi), %xmm0
9898 ; SSE-NEXT: movaps 1168(%rdi), %xmm1
9899 ; SSE-NEXT: movaps %xmm1, %xmm2
9900 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9901 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9902 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9903 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9904 ; SSE-NEXT: movaps 1312(%rdi), %xmm0
9905 ; SSE-NEXT: movaps 1264(%rdi), %xmm1
9906 ; SSE-NEXT: movaps %xmm1, %xmm2
9907 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9908 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9909 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9910 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9911 ; SSE-NEXT: movaps 1408(%rdi), %xmm0
9912 ; SSE-NEXT: movaps 1360(%rdi), %xmm1
9913 ; SSE-NEXT: movaps %xmm1, %xmm2
9914 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9915 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9916 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9917 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9918 ; SSE-NEXT: movaps 1504(%rdi), %xmm0
9919 ; SSE-NEXT: movaps 1456(%rdi), %xmm1
9920 ; SSE-NEXT: movaps %xmm1, %xmm2
9921 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9922 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9923 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9924 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9925 ; SSE-NEXT: movaps 1600(%rdi), %xmm0
9926 ; SSE-NEXT: movaps 1552(%rdi), %xmm1
9927 ; SSE-NEXT: movaps %xmm1, %xmm2
9928 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9929 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9930 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9931 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9932 ; SSE-NEXT: movaps 1696(%rdi), %xmm0
9933 ; SSE-NEXT: movaps 1648(%rdi), %xmm1
9934 ; SSE-NEXT: movaps %xmm1, %xmm2
9935 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9936 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9937 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9938 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9939 ; SSE-NEXT: movaps 1792(%rdi), %xmm0
9940 ; SSE-NEXT: movaps 1744(%rdi), %xmm1
9941 ; SSE-NEXT: movaps %xmm1, %xmm2
9942 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9943 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9944 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9945 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9946 ; SSE-NEXT: movaps 1888(%rdi), %xmm0
9947 ; SSE-NEXT: movaps 1840(%rdi), %xmm1
9948 ; SSE-NEXT: movaps %xmm1, %xmm2
9949 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9950 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9951 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9952 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9953 ; SSE-NEXT: movaps 1984(%rdi), %xmm0
9954 ; SSE-NEXT: movaps 1936(%rdi), %xmm1
9955 ; SSE-NEXT: movaps %xmm1, %xmm2
9956 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9957 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9958 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9959 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9960 ; SSE-NEXT: movaps 2080(%rdi), %xmm0
9961 ; SSE-NEXT: movaps 2032(%rdi), %xmm1
9962 ; SSE-NEXT: movaps %xmm1, %xmm2
9963 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9964 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9965 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9966 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9967 ; SSE-NEXT: movaps 2176(%rdi), %xmm0
9968 ; SSE-NEXT: movaps 2128(%rdi), %xmm1
9969 ; SSE-NEXT: movaps %xmm1, %xmm2
9970 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9971 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9972 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9973 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9974 ; SSE-NEXT: movaps 2272(%rdi), %xmm0
9975 ; SSE-NEXT: movaps 2224(%rdi), %xmm1
9976 ; SSE-NEXT: movaps %xmm1, %xmm2
9977 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9978 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9979 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9980 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9981 ; SSE-NEXT: movaps 2368(%rdi), %xmm0
9982 ; SSE-NEXT: movaps 2320(%rdi), %xmm1
9983 ; SSE-NEXT: movaps %xmm1, %xmm2
9984 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9985 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9986 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9987 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9988 ; SSE-NEXT: movaps 2464(%rdi), %xmm0
9989 ; SSE-NEXT: movaps 2416(%rdi), %xmm1
9990 ; SSE-NEXT: movaps %xmm1, %xmm2
9991 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9992 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9993 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9994 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9995 ; SSE-NEXT: movaps 2560(%rdi), %xmm0
9996 ; SSE-NEXT: movaps 2512(%rdi), %xmm1
9997 ; SSE-NEXT: movaps %xmm1, %xmm2
9998 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9999 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10000 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10001 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10002 ; SSE-NEXT: movaps 2656(%rdi), %xmm0
10003 ; SSE-NEXT: movaps 2608(%rdi), %xmm1
10004 ; SSE-NEXT: movaps %xmm1, %xmm2
10005 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10006 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10007 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10008 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10009 ; SSE-NEXT: movaps 2752(%rdi), %xmm0
10010 ; SSE-NEXT: movaps 2704(%rdi), %xmm1
10011 ; SSE-NEXT: movaps %xmm1, %xmm2
10012 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10013 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10014 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10015 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10016 ; SSE-NEXT: movaps 2848(%rdi), %xmm0
10017 ; SSE-NEXT: movaps 2800(%rdi), %xmm1
10018 ; SSE-NEXT: movaps %xmm1, %xmm2
10019 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10020 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10021 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10022 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10023 ; SSE-NEXT: movaps 2944(%rdi), %xmm0
10024 ; SSE-NEXT: movaps 2896(%rdi), %xmm1
10025 ; SSE-NEXT: movaps %xmm1, %xmm2
10026 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10027 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10028 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10029 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10030 ; SSE-NEXT: movaps 3040(%rdi), %xmm0
10031 ; SSE-NEXT: movaps 2992(%rdi), %xmm1
10032 ; SSE-NEXT: movaps %xmm1, %xmm2
10033 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10034 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10035 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10036 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10037 ; SSE-NEXT: movaps 80(%rdi), %xmm0
10038 ; SSE-NEXT: movaps 32(%rdi), %xmm1
10039 ; SSE-NEXT: movaps %xmm1, %xmm2
10040 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10041 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10042 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10043 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10044 ; SSE-NEXT: movaps 176(%rdi), %xmm0
10045 ; SSE-NEXT: movaps 128(%rdi), %xmm1
10046 ; SSE-NEXT: movaps %xmm1, %xmm2
10047 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10048 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10049 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10050 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10051 ; SSE-NEXT: movaps 272(%rdi), %xmm0
10052 ; SSE-NEXT: movaps 224(%rdi), %xmm1
10053 ; SSE-NEXT: movaps %xmm1, %xmm2
10054 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10055 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10056 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10057 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10058 ; SSE-NEXT: movaps 368(%rdi), %xmm0
10059 ; SSE-NEXT: movaps 320(%rdi), %xmm1
10060 ; SSE-NEXT: movaps %xmm1, %xmm2
10061 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10062 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10063 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10064 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10065 ; SSE-NEXT: movaps 464(%rdi), %xmm0
10066 ; SSE-NEXT: movaps 416(%rdi), %xmm1
10067 ; SSE-NEXT: movaps %xmm1, %xmm2
10068 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10069 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10070 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10071 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10072 ; SSE-NEXT: movaps 560(%rdi), %xmm0
10073 ; SSE-NEXT: movaps 512(%rdi), %xmm1
10074 ; SSE-NEXT: movaps %xmm1, %xmm2
10075 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10076 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10077 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10078 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10079 ; SSE-NEXT: movaps 656(%rdi), %xmm0
10080 ; SSE-NEXT: movaps 608(%rdi), %xmm1
10081 ; SSE-NEXT: movaps %xmm1, %xmm2
10082 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10083 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10084 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10085 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10086 ; SSE-NEXT: movaps 752(%rdi), %xmm0
10087 ; SSE-NEXT: movaps 704(%rdi), %xmm1
10088 ; SSE-NEXT: movaps %xmm1, %xmm2
10089 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10090 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10091 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10092 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10093 ; SSE-NEXT: movaps 848(%rdi), %xmm0
10094 ; SSE-NEXT: movaps 800(%rdi), %xmm1
10095 ; SSE-NEXT: movaps %xmm1, %xmm2
10096 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10097 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10098 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10099 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10100 ; SSE-NEXT: movaps 944(%rdi), %xmm0
10101 ; SSE-NEXT: movaps 896(%rdi), %xmm1
10102 ; SSE-NEXT: movaps %xmm1, %xmm2
10103 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10104 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10105 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10106 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10107 ; SSE-NEXT: movaps 1040(%rdi), %xmm0
10108 ; SSE-NEXT: movaps 992(%rdi), %xmm1
10109 ; SSE-NEXT: movaps %xmm1, %xmm2
10110 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10111 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10112 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10113 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10114 ; SSE-NEXT: movaps 1136(%rdi), %xmm0
10115 ; SSE-NEXT: movaps 1088(%rdi), %xmm1
10116 ; SSE-NEXT: movaps %xmm1, %xmm2
10117 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10118 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10119 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10120 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10121 ; SSE-NEXT: movaps 1232(%rdi), %xmm0
10122 ; SSE-NEXT: movaps 1184(%rdi), %xmm1
10123 ; SSE-NEXT: movaps %xmm1, %xmm2
10124 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10125 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10126 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10127 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10128 ; SSE-NEXT: movaps 1328(%rdi), %xmm0
10129 ; SSE-NEXT: movaps 1280(%rdi), %xmm1
10130 ; SSE-NEXT: movaps %xmm1, %xmm2
10131 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10132 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10133 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10134 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10135 ; SSE-NEXT: movaps 1424(%rdi), %xmm0
10136 ; SSE-NEXT: movaps 1376(%rdi), %xmm1
10137 ; SSE-NEXT: movaps %xmm1, %xmm2
10138 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10139 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10140 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10141 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10142 ; SSE-NEXT: movaps 1520(%rdi), %xmm0
10143 ; SSE-NEXT: movaps 1472(%rdi), %xmm1
10144 ; SSE-NEXT: movaps %xmm1, %xmm2
10145 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10146 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10147 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10148 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10149 ; SSE-NEXT: movaps 1616(%rdi), %xmm0
10150 ; SSE-NEXT: movaps 1568(%rdi), %xmm1
10151 ; SSE-NEXT: movaps %xmm1, %xmm2
10152 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10153 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10154 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10155 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10156 ; SSE-NEXT: movaps 1712(%rdi), %xmm0
10157 ; SSE-NEXT: movaps 1664(%rdi), %xmm1
10158 ; SSE-NEXT: movaps %xmm1, %xmm2
10159 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10160 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10161 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10162 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10163 ; SSE-NEXT: movaps 1808(%rdi), %xmm0
10164 ; SSE-NEXT: movaps 1760(%rdi), %xmm1
10165 ; SSE-NEXT: movaps %xmm1, %xmm2
10166 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10167 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10168 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10169 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10170 ; SSE-NEXT: movaps 1904(%rdi), %xmm0
10171 ; SSE-NEXT: movaps 1856(%rdi), %xmm1
10172 ; SSE-NEXT: movaps %xmm1, %xmm2
10173 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10174 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10175 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10176 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10177 ; SSE-NEXT: movaps 2000(%rdi), %xmm0
10178 ; SSE-NEXT: movaps 1952(%rdi), %xmm1
10179 ; SSE-NEXT: movaps %xmm1, %xmm2
10180 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10181 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10182 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10183 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10184 ; SSE-NEXT: movaps 2096(%rdi), %xmm0
10185 ; SSE-NEXT: movaps 2048(%rdi), %xmm1
10186 ; SSE-NEXT: movaps %xmm1, %xmm2
10187 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10188 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10189 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10190 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10191 ; SSE-NEXT: movaps 2192(%rdi), %xmm0
10192 ; SSE-NEXT: movaps 2144(%rdi), %xmm1
10193 ; SSE-NEXT: movaps %xmm1, %xmm2
10194 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10195 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10196 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10197 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
10198 ; SSE-NEXT: movaps 2288(%rdi), %xmm0
10199 ; SSE-NEXT: movaps 2240(%rdi), %xmm15
10200 ; SSE-NEXT: movaps %xmm15, %xmm1
10201 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10202 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10203 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
10204 ; SSE-NEXT: movaps 2384(%rdi), %xmm0
10205 ; SSE-NEXT: movaps 2336(%rdi), %xmm12
10206 ; SSE-NEXT: movaps %xmm12, %xmm1
10207 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10208 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10209 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
10210 ; SSE-NEXT: movaps 2480(%rdi), %xmm0
10211 ; SSE-NEXT: movaps 2432(%rdi), %xmm14
10212 ; SSE-NEXT: movaps %xmm14, %xmm1
10213 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10214 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10215 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
10216 ; SSE-NEXT: movaps 2576(%rdi), %xmm0
10217 ; SSE-NEXT: movaps 2528(%rdi), %xmm10
10218 ; SSE-NEXT: movaps %xmm10, %xmm13
10219 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
10220 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
10221 ; SSE-NEXT: movaps 2672(%rdi), %xmm0
10222 ; SSE-NEXT: movaps 2624(%rdi), %xmm9
10223 ; SSE-NEXT: movaps %xmm9, %xmm11
10224 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
10225 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
10226 ; SSE-NEXT: movaps 2768(%rdi), %xmm0
10227 ; SSE-NEXT: movaps 2720(%rdi), %xmm5
10228 ; SSE-NEXT: movaps %xmm5, %xmm8
10229 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
10230 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
10231 ; SSE-NEXT: movaps 2864(%rdi), %xmm0
10232 ; SSE-NEXT: movaps 2816(%rdi), %xmm6
10233 ; SSE-NEXT: movaps %xmm6, %xmm7
10234 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm0[0]
10235 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm0[1]
10236 ; SSE-NEXT: movaps 2960(%rdi), %xmm0
10237 ; SSE-NEXT: movaps 2912(%rdi), %xmm3
10238 ; SSE-NEXT: movaps %xmm3, %xmm4
10239 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
10240 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
10241 ; SSE-NEXT: movaps 3056(%rdi), %xmm0
10242 ; SSE-NEXT: movaps 3008(%rdi), %xmm1
10243 ; SSE-NEXT: movaps %xmm1, %xmm2
10244 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10245 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10246 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10247 ; SSE-NEXT: movaps %xmm0, 496(%rsi)
10248 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10249 ; SSE-NEXT: movaps %xmm0, 480(%rsi)
10250 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10251 ; SSE-NEXT: movaps %xmm0, 464(%rsi)
10252 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10253 ; SSE-NEXT: movaps %xmm0, 448(%rsi)
10254 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10255 ; SSE-NEXT: movaps %xmm0, 432(%rsi)
10256 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10257 ; SSE-NEXT: movaps %xmm0, 416(%rsi)
10258 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10259 ; SSE-NEXT: movaps %xmm0, 400(%rsi)
10260 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10261 ; SSE-NEXT: movaps %xmm0, 384(%rsi)
10262 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10263 ; SSE-NEXT: movaps %xmm0, 368(%rsi)
10264 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10265 ; SSE-NEXT: movaps %xmm0, 352(%rsi)
10266 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10267 ; SSE-NEXT: movaps %xmm0, 336(%rsi)
10268 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10269 ; SSE-NEXT: movaps %xmm0, 320(%rsi)
10270 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10271 ; SSE-NEXT: movaps %xmm0, 304(%rsi)
10272 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10273 ; SSE-NEXT: movaps %xmm0, 288(%rsi)
10274 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10275 ; SSE-NEXT: movaps %xmm0, 272(%rsi)
10276 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10277 ; SSE-NEXT: movaps %xmm0, 256(%rsi)
10278 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10279 ; SSE-NEXT: movaps %xmm0, 240(%rsi)
10280 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10281 ; SSE-NEXT: movaps %xmm0, 224(%rsi)
10282 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10283 ; SSE-NEXT: movaps %xmm0, 208(%rsi)
10284 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10285 ; SSE-NEXT: movaps %xmm0, 192(%rsi)
10286 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10287 ; SSE-NEXT: movaps %xmm0, 176(%rsi)
10288 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10289 ; SSE-NEXT: movaps %xmm0, 160(%rsi)
10290 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10291 ; SSE-NEXT: movaps %xmm0, 144(%rsi)
10292 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10293 ; SSE-NEXT: movaps %xmm0, 128(%rsi)
10294 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10295 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
10296 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10297 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
10298 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10299 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
10300 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10301 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
10302 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10303 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
10304 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10305 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
10306 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10307 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
10308 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10309 ; SSE-NEXT: movaps %xmm0, (%rsi)
10310 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10311 ; SSE-NEXT: movaps %xmm0, 496(%rdx)
10312 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10313 ; SSE-NEXT: movaps %xmm0, 480(%rdx)
10314 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10315 ; SSE-NEXT: movaps %xmm0, 464(%rdx)
10316 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10317 ; SSE-NEXT: movaps %xmm0, 448(%rdx)
10318 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10319 ; SSE-NEXT: movaps %xmm0, 432(%rdx)
10320 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10321 ; SSE-NEXT: movaps %xmm0, 416(%rdx)
10322 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10323 ; SSE-NEXT: movaps %xmm0, 400(%rdx)
10324 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10325 ; SSE-NEXT: movaps %xmm0, 384(%rdx)
10326 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10327 ; SSE-NEXT: movaps %xmm0, 368(%rdx)
10328 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10329 ; SSE-NEXT: movaps %xmm0, 352(%rdx)
10330 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10331 ; SSE-NEXT: movaps %xmm0, 336(%rdx)
10332 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10333 ; SSE-NEXT: movaps %xmm0, 320(%rdx)
10334 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10335 ; SSE-NEXT: movaps %xmm0, 304(%rdx)
10336 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10337 ; SSE-NEXT: movaps %xmm0, 288(%rdx)
10338 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10339 ; SSE-NEXT: movaps %xmm0, 272(%rdx)
10340 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10341 ; SSE-NEXT: movaps %xmm0, 256(%rdx)
10342 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10343 ; SSE-NEXT: movaps %xmm0, 240(%rdx)
10344 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10345 ; SSE-NEXT: movaps %xmm0, 224(%rdx)
10346 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10347 ; SSE-NEXT: movaps %xmm0, 208(%rdx)
10348 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10349 ; SSE-NEXT: movaps %xmm0, 192(%rdx)
10350 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10351 ; SSE-NEXT: movaps %xmm0, 176(%rdx)
10352 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10353 ; SSE-NEXT: movaps %xmm0, 160(%rdx)
10354 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10355 ; SSE-NEXT: movaps %xmm0, 144(%rdx)
10356 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10357 ; SSE-NEXT: movaps %xmm0, 128(%rdx)
10358 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10359 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
10360 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10361 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
10362 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10363 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
10364 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10365 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
10366 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10367 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
10368 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10369 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
10370 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10371 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
10372 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10373 ; SSE-NEXT: movaps %xmm0, (%rdx)
10374 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10375 ; SSE-NEXT: movaps %xmm0, 496(%rcx)
10376 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10377 ; SSE-NEXT: movaps %xmm0, 480(%rcx)
10378 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10379 ; SSE-NEXT: movaps %xmm0, 464(%rcx)
10380 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10381 ; SSE-NEXT: movaps %xmm0, 448(%rcx)
10382 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10383 ; SSE-NEXT: movaps %xmm0, 432(%rcx)
10384 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10385 ; SSE-NEXT: movaps %xmm0, 416(%rcx)
10386 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10387 ; SSE-NEXT: movaps %xmm0, 400(%rcx)
10388 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10389 ; SSE-NEXT: movaps %xmm0, 384(%rcx)
10390 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10391 ; SSE-NEXT: movaps %xmm0, 368(%rcx)
10392 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10393 ; SSE-NEXT: movaps %xmm0, 352(%rcx)
10394 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10395 ; SSE-NEXT: movaps %xmm0, 336(%rcx)
10396 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10397 ; SSE-NEXT: movaps %xmm0, 320(%rcx)
10398 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10399 ; SSE-NEXT: movaps %xmm0, 304(%rcx)
10400 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10401 ; SSE-NEXT: movaps %xmm0, 288(%rcx)
10402 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10403 ; SSE-NEXT: movaps %xmm0, 272(%rcx)
10404 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10405 ; SSE-NEXT: movaps %xmm0, 256(%rcx)
10406 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10407 ; SSE-NEXT: movaps %xmm0, 240(%rcx)
10408 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10409 ; SSE-NEXT: movaps %xmm0, 224(%rcx)
10410 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10411 ; SSE-NEXT: movaps %xmm0, 208(%rcx)
10412 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10413 ; SSE-NEXT: movaps %xmm0, 192(%rcx)
10414 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10415 ; SSE-NEXT: movaps %xmm0, 176(%rcx)
10416 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10417 ; SSE-NEXT: movaps %xmm0, 160(%rcx)
10418 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10419 ; SSE-NEXT: movaps %xmm0, 144(%rcx)
10420 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10421 ; SSE-NEXT: movaps %xmm0, 128(%rcx)
10422 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10423 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
10424 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10425 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
10426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10427 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
10428 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10429 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
10430 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10431 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
10432 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10433 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
10434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10435 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
10436 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10437 ; SSE-NEXT: movaps %xmm0, (%rcx)
10438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10439 ; SSE-NEXT: movaps %xmm0, 496(%r8)
10440 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10441 ; SSE-NEXT: movaps %xmm0, 480(%r8)
10442 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10443 ; SSE-NEXT: movaps %xmm0, 464(%r8)
10444 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10445 ; SSE-NEXT: movaps %xmm0, 448(%r8)
10446 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10447 ; SSE-NEXT: movaps %xmm0, 432(%r8)
10448 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10449 ; SSE-NEXT: movaps %xmm0, 416(%r8)
10450 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10451 ; SSE-NEXT: movaps %xmm0, 400(%r8)
10452 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10453 ; SSE-NEXT: movaps %xmm0, 384(%r8)
10454 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10455 ; SSE-NEXT: movaps %xmm0, 368(%r8)
10456 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10457 ; SSE-NEXT: movaps %xmm0, 352(%r8)
10458 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10459 ; SSE-NEXT: movaps %xmm0, 336(%r8)
10460 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10461 ; SSE-NEXT: movaps %xmm0, 320(%r8)
10462 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10463 ; SSE-NEXT: movaps %xmm0, 304(%r8)
10464 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10465 ; SSE-NEXT: movaps %xmm0, 288(%r8)
10466 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10467 ; SSE-NEXT: movaps %xmm0, 272(%r8)
10468 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10469 ; SSE-NEXT: movaps %xmm0, 256(%r8)
10470 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10471 ; SSE-NEXT: movaps %xmm0, 240(%r8)
10472 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10473 ; SSE-NEXT: movaps %xmm0, 224(%r8)
10474 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10475 ; SSE-NEXT: movaps %xmm0, 208(%r8)
10476 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10477 ; SSE-NEXT: movaps %xmm0, 192(%r8)
10478 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10479 ; SSE-NEXT: movaps %xmm0, 176(%r8)
10480 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10481 ; SSE-NEXT: movaps %xmm0, 160(%r8)
10482 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10483 ; SSE-NEXT: movaps %xmm0, 144(%r8)
10484 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10485 ; SSE-NEXT: movaps %xmm0, 128(%r8)
10486 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10487 ; SSE-NEXT: movaps %xmm0, 112(%r8)
10488 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10489 ; SSE-NEXT: movaps %xmm0, 96(%r8)
10490 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10491 ; SSE-NEXT: movaps %xmm0, 80(%r8)
10492 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10493 ; SSE-NEXT: movaps %xmm0, 64(%r8)
10494 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10495 ; SSE-NEXT: movaps %xmm0, 48(%r8)
10496 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10497 ; SSE-NEXT: movaps %xmm0, 32(%r8)
10498 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10499 ; SSE-NEXT: movaps %xmm0, 16(%r8)
10500 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10501 ; SSE-NEXT: movaps %xmm0, (%r8)
10502 ; SSE-NEXT: movaps %xmm2, 496(%r9)
10503 ; SSE-NEXT: movaps %xmm4, 480(%r9)
10504 ; SSE-NEXT: movaps %xmm7, 464(%r9)
10505 ; SSE-NEXT: movaps %xmm8, 448(%r9)
10506 ; SSE-NEXT: movaps %xmm11, 432(%r9)
10507 ; SSE-NEXT: movaps %xmm13, 416(%r9)
10508 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10509 ; SSE-NEXT: movaps %xmm0, 400(%r9)
10510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10511 ; SSE-NEXT: movaps %xmm0, 384(%r9)
10512 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10513 ; SSE-NEXT: movaps %xmm0, 368(%r9)
10514 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10515 ; SSE-NEXT: movaps %xmm0, 352(%r9)
10516 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10517 ; SSE-NEXT: movaps %xmm0, 336(%r9)
10518 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10519 ; SSE-NEXT: movaps %xmm0, 320(%r9)
10520 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10521 ; SSE-NEXT: movaps %xmm0, 304(%r9)
10522 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10523 ; SSE-NEXT: movaps %xmm0, 288(%r9)
10524 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10525 ; SSE-NEXT: movaps %xmm0, 272(%r9)
10526 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10527 ; SSE-NEXT: movaps %xmm0, 256(%r9)
10528 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10529 ; SSE-NEXT: movaps %xmm0, 240(%r9)
10530 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10531 ; SSE-NEXT: movaps %xmm0, 224(%r9)
10532 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10533 ; SSE-NEXT: movaps %xmm0, 208(%r9)
10534 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10535 ; SSE-NEXT: movaps %xmm0, 192(%r9)
10536 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10537 ; SSE-NEXT: movaps %xmm0, 176(%r9)
10538 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10539 ; SSE-NEXT: movaps %xmm0, 160(%r9)
10540 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10541 ; SSE-NEXT: movaps %xmm0, 144(%r9)
10542 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10543 ; SSE-NEXT: movaps %xmm0, 128(%r9)
10544 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10545 ; SSE-NEXT: movaps %xmm0, 112(%r9)
10546 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10547 ; SSE-NEXT: movaps %xmm0, 96(%r9)
10548 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10549 ; SSE-NEXT: movaps %xmm0, 80(%r9)
10550 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10551 ; SSE-NEXT: movaps %xmm0, 64(%r9)
10552 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10553 ; SSE-NEXT: movaps %xmm0, 48(%r9)
10554 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10555 ; SSE-NEXT: movaps %xmm0, 32(%r9)
10556 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10557 ; SSE-NEXT: movaps %xmm0, 16(%r9)
10558 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10559 ; SSE-NEXT: movaps %xmm0, (%r9)
10560 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10561 ; SSE-NEXT: movaps %xmm1, 496(%rax)
10562 ; SSE-NEXT: movaps %xmm3, 480(%rax)
10563 ; SSE-NEXT: movaps %xmm6, 464(%rax)
10564 ; SSE-NEXT: movaps %xmm5, 448(%rax)
10565 ; SSE-NEXT: movaps %xmm9, 432(%rax)
10566 ; SSE-NEXT: movaps %xmm10, 416(%rax)
10567 ; SSE-NEXT: movaps %xmm14, 400(%rax)
10568 ; SSE-NEXT: movaps %xmm12, 384(%rax)
10569 ; SSE-NEXT: movaps %xmm15, 368(%rax)
10570 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
10571 ; SSE-NEXT: movaps %xmm0, 352(%rax)
10572 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10573 ; SSE-NEXT: movaps %xmm0, 336(%rax)
10574 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10575 ; SSE-NEXT: movaps %xmm0, 320(%rax)
10576 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10577 ; SSE-NEXT: movaps %xmm0, 304(%rax)
10578 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10579 ; SSE-NEXT: movaps %xmm0, 288(%rax)
10580 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10581 ; SSE-NEXT: movaps %xmm0, 272(%rax)
10582 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10583 ; SSE-NEXT: movaps %xmm0, 256(%rax)
10584 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10585 ; SSE-NEXT: movaps %xmm0, 240(%rax)
10586 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10587 ; SSE-NEXT: movaps %xmm0, 224(%rax)
10588 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10589 ; SSE-NEXT: movaps %xmm0, 208(%rax)
10590 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10591 ; SSE-NEXT: movaps %xmm0, 192(%rax)
10592 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10593 ; SSE-NEXT: movaps %xmm0, 176(%rax)
10594 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10595 ; SSE-NEXT: movaps %xmm0, 160(%rax)
10596 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10597 ; SSE-NEXT: movaps %xmm0, 144(%rax)
10598 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10599 ; SSE-NEXT: movaps %xmm0, 128(%rax)
10600 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10601 ; SSE-NEXT: movaps %xmm0, 112(%rax)
10602 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10603 ; SSE-NEXT: movaps %xmm0, 96(%rax)
10604 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10605 ; SSE-NEXT: movaps %xmm0, 80(%rax)
10606 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10607 ; SSE-NEXT: movaps %xmm0, 64(%rax)
10608 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10609 ; SSE-NEXT: movaps %xmm0, 48(%rax)
10610 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10611 ; SSE-NEXT: movaps %xmm0, 32(%rax)
10612 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10613 ; SSE-NEXT: movaps %xmm0, 16(%rax)
10614 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10615 ; SSE-NEXT: movaps %xmm0, (%rax)
10616 ; SSE-NEXT: addq $2712, %rsp # imm = 0xA98
10619 ; AVX-LABEL: load_i64_stride6_vf64:
10621 ; AVX-NEXT: subq $3768, %rsp # imm = 0xEB8
10622 ; AVX-NEXT: vmovaps 1088(%rdi), %ymm2
10623 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10624 ; AVX-NEXT: vmovaps 704(%rdi), %ymm3
10625 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10626 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
10627 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10628 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm0
10629 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10630 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10631 ; AVX-NEXT: vmovaps 240(%rdi), %xmm4
10632 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10633 ; AVX-NEXT: vmovaps 192(%rdi), %xmm1
10634 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10635 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
10636 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10637 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10638 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm0
10639 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10640 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
10641 ; AVX-NEXT: vmovaps 624(%rdi), %xmm3
10642 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10643 ; AVX-NEXT: vmovaps 576(%rdi), %xmm1
10644 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10645 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
10646 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10647 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10648 ; AVX-NEXT: vinsertf128 $1, 1056(%rdi), %ymm0, %ymm0
10649 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10650 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
10651 ; AVX-NEXT: vmovaps 1008(%rdi), %xmm2
10652 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10653 ; AVX-NEXT: vmovaps 960(%rdi), %xmm1
10654 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10655 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10656 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10657 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10658 ; AVX-NEXT: vmovaps 1472(%rdi), %ymm1
10659 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10660 ; AVX-NEXT: vinsertf128 $1, 1440(%rdi), %ymm0, %ymm0
10661 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10662 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10663 ; AVX-NEXT: vmovaps 1392(%rdi), %xmm2
10664 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10665 ; AVX-NEXT: vmovaps 1344(%rdi), %xmm1
10666 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10667 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10668 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10669 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10670 ; AVX-NEXT: vmovaps 1856(%rdi), %ymm1
10671 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10672 ; AVX-NEXT: vinsertf128 $1, 1824(%rdi), %ymm0, %ymm0
10673 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10674 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10675 ; AVX-NEXT: vmovaps 1776(%rdi), %xmm2
10676 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10677 ; AVX-NEXT: vmovaps 1728(%rdi), %xmm1
10678 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10679 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10680 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10681 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10682 ; AVX-NEXT: vmovaps 2240(%rdi), %ymm1
10683 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10684 ; AVX-NEXT: vinsertf128 $1, 2208(%rdi), %ymm0, %ymm0
10685 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10686 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10687 ; AVX-NEXT: vmovaps 2160(%rdi), %xmm2
10688 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10689 ; AVX-NEXT: vmovaps 2112(%rdi), %xmm1
10690 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10691 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10692 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10693 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10694 ; AVX-NEXT: vmovaps 2624(%rdi), %ymm1
10695 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10696 ; AVX-NEXT: vinsertf128 $1, 2592(%rdi), %ymm0, %ymm0
10697 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10698 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10699 ; AVX-NEXT: vmovaps 2544(%rdi), %xmm2
10700 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10701 ; AVX-NEXT: vmovaps 2496(%rdi), %xmm1
10702 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10703 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10704 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10705 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10706 ; AVX-NEXT: vmovaps 3008(%rdi), %ymm1
10707 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10708 ; AVX-NEXT: vinsertf128 $1, 2976(%rdi), %ymm0, %ymm0
10709 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10710 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10711 ; AVX-NEXT: vmovaps 2928(%rdi), %xmm2
10712 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10713 ; AVX-NEXT: vmovaps 2880(%rdi), %xmm1
10714 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10715 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10716 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10717 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10718 ; AVX-NEXT: vmovaps 128(%rdi), %ymm1
10719 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10720 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm0
10721 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10722 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10723 ; AVX-NEXT: vmovaps (%rdi), %xmm2
10724 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10725 ; AVX-NEXT: vmovaps 48(%rdi), %xmm1
10726 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10727 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
10728 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10729 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10730 ; AVX-NEXT: vmovaps 512(%rdi), %ymm1
10731 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10732 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm0
10733 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10734 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10735 ; AVX-NEXT: vmovaps 432(%rdi), %xmm2
10736 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10737 ; AVX-NEXT: vmovaps 384(%rdi), %xmm1
10738 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10739 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10740 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10741 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10742 ; AVX-NEXT: vmovaps 896(%rdi), %ymm1
10743 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10744 ; AVX-NEXT: vinsertf128 $1, 864(%rdi), %ymm0, %ymm0
10745 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10746 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10747 ; AVX-NEXT: vmovaps 816(%rdi), %xmm2
10748 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10749 ; AVX-NEXT: vmovaps 768(%rdi), %xmm1
10750 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10751 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10752 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10753 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10754 ; AVX-NEXT: vmovaps 1280(%rdi), %ymm1
10755 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10756 ; AVX-NEXT: vinsertf128 $1, 1248(%rdi), %ymm0, %ymm0
10757 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10758 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10759 ; AVX-NEXT: vmovaps 1200(%rdi), %xmm2
10760 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10761 ; AVX-NEXT: vmovaps 1152(%rdi), %xmm1
10762 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10763 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10764 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10765 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10766 ; AVX-NEXT: vmovaps 1664(%rdi), %ymm1
10767 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10768 ; AVX-NEXT: vinsertf128 $1, 1632(%rdi), %ymm0, %ymm0
10769 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10770 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10771 ; AVX-NEXT: vmovaps 1584(%rdi), %xmm14
10772 ; AVX-NEXT: vmovaps 1536(%rdi), %xmm13
10773 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
10774 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10775 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10776 ; AVX-NEXT: vmovaps 2048(%rdi), %ymm12
10777 ; AVX-NEXT: vinsertf128 $1, 2016(%rdi), %ymm0, %ymm11
10778 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
10779 ; AVX-NEXT: vmovaps 1968(%rdi), %xmm10
10780 ; AVX-NEXT: vmovaps 1920(%rdi), %xmm9
10781 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
10782 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10783 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10784 ; AVX-NEXT: vmovaps 2432(%rdi), %ymm8
10785 ; AVX-NEXT: vinsertf128 $1, 2400(%rdi), %ymm0, %ymm7
10786 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
10787 ; AVX-NEXT: vmovaps 2352(%rdi), %xmm6
10788 ; AVX-NEXT: vmovaps 2304(%rdi), %xmm5
10789 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
10790 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10791 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10792 ; AVX-NEXT: vmovaps 2816(%rdi), %ymm4
10793 ; AVX-NEXT: vinsertf128 $1, 2784(%rdi), %ymm0, %ymm3
10794 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
10795 ; AVX-NEXT: vmovaps 2736(%rdi), %xmm2
10796 ; AVX-NEXT: vmovaps 2688(%rdi), %xmm1
10797 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
10798 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10799 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10800 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10801 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10802 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10803 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10804 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10805 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10806 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10807 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10808 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10809 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10810 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10811 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10812 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10813 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10814 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10815 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10816 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10817 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10818 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10819 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10820 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10821 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10822 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10823 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10824 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10825 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10826 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10827 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10828 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10829 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10830 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10831 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10832 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10833 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10834 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10835 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10836 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10837 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10838 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10839 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10840 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10841 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10842 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10843 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10844 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10845 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10846 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10847 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10848 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10849 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10850 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10851 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10852 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10853 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10854 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10855 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10856 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10857 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10858 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10859 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10860 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10861 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10862 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10863 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10864 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10865 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10866 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10867 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10868 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10869 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10870 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10871 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10872 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10873 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10874 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10875 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10876 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10877 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10878 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10879 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10880 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10881 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10882 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10883 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10884 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10885 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10886 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10887 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10888 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10889 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10890 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10891 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10892 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10893 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10894 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10895 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10896 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10897 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10898 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10899 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
10900 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
10901 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10902 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
10903 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
10904 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
10905 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10906 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
10907 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
10908 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
10909 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10910 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
10911 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
10912 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10913 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10914 ; AVX-NEXT: vmovaps 288(%rdi), %ymm1
10915 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10916 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm0
10917 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10918 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10919 ; AVX-NEXT: vmovaps 256(%rdi), %xmm2
10920 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10921 ; AVX-NEXT: vmovaps 208(%rdi), %xmm1
10922 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10923 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10924 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10925 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10926 ; AVX-NEXT: vmovaps 672(%rdi), %ymm1
10927 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10928 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm0
10929 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10930 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10931 ; AVX-NEXT: vmovaps 640(%rdi), %xmm2
10932 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10933 ; AVX-NEXT: vmovaps 592(%rdi), %xmm1
10934 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10935 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10936 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10937 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10938 ; AVX-NEXT: vmovaps 1056(%rdi), %ymm1
10939 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10940 ; AVX-NEXT: vinsertf128 $1, 1120(%rdi), %ymm0, %ymm0
10941 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10942 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10943 ; AVX-NEXT: vmovaps 1024(%rdi), %xmm2
10944 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10945 ; AVX-NEXT: vmovaps 976(%rdi), %xmm1
10946 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10947 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10948 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10949 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10950 ; AVX-NEXT: vmovaps 1440(%rdi), %ymm1
10951 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10952 ; AVX-NEXT: vinsertf128 $1, 1504(%rdi), %ymm0, %ymm0
10953 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10954 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10955 ; AVX-NEXT: vmovaps 1408(%rdi), %xmm2
10956 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10957 ; AVX-NEXT: vmovaps 1360(%rdi), %xmm1
10958 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10959 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10960 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10961 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10962 ; AVX-NEXT: vmovaps 1824(%rdi), %ymm1
10963 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10964 ; AVX-NEXT: vinsertf128 $1, 1888(%rdi), %ymm0, %ymm0
10965 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10966 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10967 ; AVX-NEXT: vmovaps 1792(%rdi), %xmm2
10968 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10969 ; AVX-NEXT: vmovaps 1744(%rdi), %xmm1
10970 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10971 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10972 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10973 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10974 ; AVX-NEXT: vmovaps 2208(%rdi), %ymm1
10975 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10976 ; AVX-NEXT: vinsertf128 $1, 2272(%rdi), %ymm0, %ymm0
10977 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10978 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10979 ; AVX-NEXT: vmovaps 2176(%rdi), %xmm2
10980 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10981 ; AVX-NEXT: vmovaps 2128(%rdi), %xmm1
10982 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10983 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10984 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10985 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10986 ; AVX-NEXT: vmovaps 2592(%rdi), %ymm1
10987 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10988 ; AVX-NEXT: vinsertf128 $1, 2656(%rdi), %ymm0, %ymm0
10989 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10990 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10991 ; AVX-NEXT: vmovaps 2560(%rdi), %xmm2
10992 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10993 ; AVX-NEXT: vmovaps 2512(%rdi), %xmm1
10994 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10995 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10996 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10997 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10998 ; AVX-NEXT: vmovaps 2976(%rdi), %ymm1
10999 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11000 ; AVX-NEXT: vinsertf128 $1, 3040(%rdi), %ymm0, %ymm0
11001 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11002 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11003 ; AVX-NEXT: vmovaps 2944(%rdi), %xmm2
11004 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11005 ; AVX-NEXT: vmovaps 2896(%rdi), %xmm1
11006 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11007 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11008 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11009 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11010 ; AVX-NEXT: vmovaps 2784(%rdi), %ymm1
11011 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11012 ; AVX-NEXT: vinsertf128 $1, 2848(%rdi), %ymm0, %ymm0
11013 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11014 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11015 ; AVX-NEXT: vmovaps 2752(%rdi), %xmm2
11016 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11017 ; AVX-NEXT: vmovaps 2704(%rdi), %xmm1
11018 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11019 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11020 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11021 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11022 ; AVX-NEXT: vmovaps 2400(%rdi), %ymm1
11023 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11024 ; AVX-NEXT: vinsertf128 $1, 2464(%rdi), %ymm0, %ymm0
11025 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11026 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11027 ; AVX-NEXT: vmovaps 2368(%rdi), %xmm2
11028 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11029 ; AVX-NEXT: vmovaps 2320(%rdi), %xmm1
11030 ; AVX-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
11031 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11032 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11033 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11034 ; AVX-NEXT: vmovaps 2016(%rdi), %ymm1
11035 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11036 ; AVX-NEXT: vinsertf128 $1, 2080(%rdi), %ymm0, %ymm0
11037 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11038 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11039 ; AVX-NEXT: vmovaps 1984(%rdi), %xmm2
11040 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11041 ; AVX-NEXT: vmovaps 1936(%rdi), %xmm1
11042 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11043 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11044 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11045 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11046 ; AVX-NEXT: vmovaps 1632(%rdi), %ymm1
11047 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11048 ; AVX-NEXT: vinsertf128 $1, 1696(%rdi), %ymm0, %ymm0
11049 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11050 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11051 ; AVX-NEXT: vmovaps 1600(%rdi), %xmm2
11052 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11053 ; AVX-NEXT: vmovaps 1552(%rdi), %xmm1
11054 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11055 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11056 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11057 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11058 ; AVX-NEXT: vmovaps 1248(%rdi), %ymm1
11059 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11060 ; AVX-NEXT: vinsertf128 $1, 1312(%rdi), %ymm0, %ymm0
11061 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11062 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11063 ; AVX-NEXT: vmovaps 1216(%rdi), %xmm12
11064 ; AVX-NEXT: vmovaps 1168(%rdi), %xmm11
11065 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
11066 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11067 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11068 ; AVX-NEXT: vmovaps 864(%rdi), %ymm10
11069 ; AVX-NEXT: vinsertf128 $1, 928(%rdi), %ymm0, %ymm9
11070 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm9[0],ymm10[2],ymm9[2]
11071 ; AVX-NEXT: vmovaps 832(%rdi), %xmm8
11072 ; AVX-NEXT: vmovaps 784(%rdi), %xmm7
11073 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm7[0],xmm8[0]
11074 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11075 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11076 ; AVX-NEXT: vmovaps 480(%rdi), %ymm6
11077 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm15
11078 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm6[0],ymm15[0],ymm6[2],ymm15[2]
11079 ; AVX-NEXT: vmovaps 448(%rdi), %xmm5
11080 ; AVX-NEXT: vmovaps 400(%rdi), %xmm4
11081 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm4[0],xmm5[0]
11082 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
11083 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11084 ; AVX-NEXT: vmovaps 96(%rdi), %ymm2
11085 ; AVX-NEXT: vmovaps 16(%rdi), %xmm14
11086 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm1
11087 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
11088 ; AVX-NEXT: vmovaps 64(%rdi), %xmm0
11089 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm14[0],xmm0[0]
11090 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm13[0,1,2,3],ymm3[4,5,6,7]
11091 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11092 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
11093 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm14[1],xmm0[1]
11094 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11095 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11096 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11097 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11098 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11099 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11100 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11101 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11102 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11103 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11104 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm6[1],ymm15[1],ymm6[3],ymm15[3]
11105 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm5[1]
11106 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11107 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11108 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11109 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11110 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11111 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11112 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11113 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11114 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11115 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11116 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm10[1],ymm9[1],ymm10[3],ymm9[3]
11117 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm7[1],xmm8[1]
11118 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11119 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11120 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11121 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11122 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11123 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11124 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11125 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11126 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11127 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11128 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11129 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11130 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11131 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm12[1]
11132 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11133 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11134 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11135 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11136 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11137 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11138 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11139 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11140 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11141 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11142 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11143 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11144 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11145 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11146 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11147 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11148 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11149 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11150 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11151 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11152 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11153 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11154 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11155 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11156 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11157 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11158 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11159 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11160 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11161 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11162 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11163 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11164 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11165 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11166 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11167 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11168 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11169 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11170 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11171 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11172 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11173 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11174 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11175 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11176 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11177 ; AVX-NEXT: vmovaps (%rsp), %xmm1 # 16-byte Reload
11178 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11179 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11180 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11181 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11182 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11183 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11184 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11185 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11186 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11187 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11188 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11189 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11190 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11191 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11192 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11193 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11194 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11195 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11196 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11197 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11198 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11199 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11200 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11201 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11202 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11203 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11204 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11205 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11206 ; AVX-NEXT: vmovaps 160(%rdi), %ymm1
11207 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11208 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
11209 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11210 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm0
11211 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11212 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11213 ; AVX-NEXT: vmovaps 80(%rdi), %xmm1
11214 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11215 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
11216 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11217 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11218 ; AVX-NEXT: vmovaps 352(%rdi), %ymm1
11219 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11220 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm0
11221 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11222 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11223 ; AVX-NEXT: vmovaps 272(%rdi), %xmm2
11224 ; AVX-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
11225 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
11226 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11227 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11228 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11229 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11230 ; AVX-NEXT: vmovaps 544(%rdi), %ymm1
11231 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11232 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm0
11233 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11234 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11235 ; AVX-NEXT: vmovaps 464(%rdi), %xmm2
11236 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11237 ; AVX-NEXT: vmovaps 416(%rdi), %xmm1
11238 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11239 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11240 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11241 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11242 ; AVX-NEXT: vmovaps 736(%rdi), %ymm1
11243 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11244 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm0
11245 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11246 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11247 ; AVX-NEXT: vmovaps 656(%rdi), %xmm2
11248 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11249 ; AVX-NEXT: vmovaps 608(%rdi), %xmm1
11250 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11251 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11252 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11253 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11254 ; AVX-NEXT: vmovaps 928(%rdi), %ymm1
11255 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11256 ; AVX-NEXT: vinsertf128 $1, 896(%rdi), %ymm0, %ymm0
11257 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11258 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11259 ; AVX-NEXT: vmovaps 848(%rdi), %xmm2
11260 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11261 ; AVX-NEXT: vmovaps 800(%rdi), %xmm1
11262 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11263 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11264 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11265 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11266 ; AVX-NEXT: vmovaps 1120(%rdi), %ymm1
11267 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11268 ; AVX-NEXT: vinsertf128 $1, 1088(%rdi), %ymm0, %ymm0
11269 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11270 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11271 ; AVX-NEXT: vmovaps 1040(%rdi), %xmm2
11272 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11273 ; AVX-NEXT: vmovaps 992(%rdi), %xmm1
11274 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11275 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11276 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11277 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11278 ; AVX-NEXT: vmovaps 1312(%rdi), %ymm1
11279 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11280 ; AVX-NEXT: vinsertf128 $1, 1280(%rdi), %ymm0, %ymm0
11281 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11282 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11283 ; AVX-NEXT: vmovaps 1232(%rdi), %xmm2
11284 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11285 ; AVX-NEXT: vmovaps 1184(%rdi), %xmm1
11286 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11287 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11288 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11289 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11290 ; AVX-NEXT: vmovaps 1504(%rdi), %ymm1
11291 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11292 ; AVX-NEXT: vinsertf128 $1, 1472(%rdi), %ymm0, %ymm0
11293 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11294 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11295 ; AVX-NEXT: vmovaps 1424(%rdi), %xmm2
11296 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11297 ; AVX-NEXT: vmovaps 1376(%rdi), %xmm1
11298 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11299 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11300 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11301 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11302 ; AVX-NEXT: vmovaps 1696(%rdi), %ymm1
11303 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11304 ; AVX-NEXT: vinsertf128 $1, 1664(%rdi), %ymm0, %ymm0
11305 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11306 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11307 ; AVX-NEXT: vmovaps 1616(%rdi), %xmm2
11308 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11309 ; AVX-NEXT: vmovaps 1568(%rdi), %xmm1
11310 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11311 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11312 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11313 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11314 ; AVX-NEXT: vmovaps 1888(%rdi), %ymm1
11315 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11316 ; AVX-NEXT: vinsertf128 $1, 1856(%rdi), %ymm0, %ymm0
11317 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11318 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11319 ; AVX-NEXT: vmovaps 1808(%rdi), %xmm2
11320 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11321 ; AVX-NEXT: vmovaps 1760(%rdi), %xmm1
11322 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11323 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11324 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11325 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11326 ; AVX-NEXT: vmovaps 2080(%rdi), %ymm1
11327 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11328 ; AVX-NEXT: vinsertf128 $1, 2048(%rdi), %ymm0, %ymm0
11329 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11330 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11331 ; AVX-NEXT: vmovaps 2000(%rdi), %xmm2
11332 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11333 ; AVX-NEXT: vmovaps 1952(%rdi), %xmm1
11334 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11335 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11336 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11337 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11338 ; AVX-NEXT: vmovaps 2272(%rdi), %ymm1
11339 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11340 ; AVX-NEXT: vinsertf128 $1, 2240(%rdi), %ymm0, %ymm0
11341 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11342 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11343 ; AVX-NEXT: vmovaps 2192(%rdi), %xmm2
11344 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11345 ; AVX-NEXT: vmovaps 2144(%rdi), %xmm1
11346 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11347 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11348 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11349 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11350 ; AVX-NEXT: vmovaps 2464(%rdi), %ymm1
11351 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11352 ; AVX-NEXT: vinsertf128 $1, 2432(%rdi), %ymm0, %ymm0
11353 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11354 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11355 ; AVX-NEXT: vmovaps 2384(%rdi), %xmm14
11356 ; AVX-NEXT: vmovaps 2336(%rdi), %xmm13
11357 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
11358 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11359 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11360 ; AVX-NEXT: vmovaps 2656(%rdi), %ymm12
11361 ; AVX-NEXT: vinsertf128 $1, 2624(%rdi), %ymm0, %ymm11
11362 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
11363 ; AVX-NEXT: vmovaps 2576(%rdi), %xmm10
11364 ; AVX-NEXT: vmovaps 2528(%rdi), %xmm9
11365 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
11366 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11367 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11368 ; AVX-NEXT: vmovaps 2848(%rdi), %ymm8
11369 ; AVX-NEXT: vinsertf128 $1, 2816(%rdi), %ymm0, %ymm7
11370 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
11371 ; AVX-NEXT: vmovaps 2768(%rdi), %xmm6
11372 ; AVX-NEXT: vmovaps 2720(%rdi), %xmm5
11373 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
11374 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11375 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11376 ; AVX-NEXT: vmovaps 3040(%rdi), %ymm4
11377 ; AVX-NEXT: vinsertf128 $1, 3008(%rdi), %ymm0, %ymm3
11378 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
11379 ; AVX-NEXT: vmovaps 2960(%rdi), %xmm2
11380 ; AVX-NEXT: vmovaps 2912(%rdi), %xmm1
11381 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
11382 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11383 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11384 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11385 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11386 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11387 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11388 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11389 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11390 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11391 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11392 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11393 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11394 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11395 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11396 ; AVX-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
11397 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11398 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11399 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11400 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11401 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11402 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11403 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11404 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11405 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11406 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11407 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11408 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11409 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11410 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11411 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11412 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11413 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11414 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11415 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11416 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11417 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11418 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11419 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11420 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11421 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11422 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11423 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11424 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11425 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11426 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11427 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11428 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11429 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11430 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11431 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11432 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11433 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11434 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11435 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11436 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11437 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11438 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11439 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11440 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11441 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11442 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11443 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11444 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11445 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11446 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11447 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
11448 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11449 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11450 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11451 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11452 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11453 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11454 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11455 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11456 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11457 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11458 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11459 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11460 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11461 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11462 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11463 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11464 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11465 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11466 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11467 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11468 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11469 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11470 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11471 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11472 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11473 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11474 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11475 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11476 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11477 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11478 ; AVX-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11479 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11480 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11481 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11482 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
11483 ; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
11484 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
11485 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
11486 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
11487 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
11488 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
11489 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
11490 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
11491 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11492 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11493 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11494 ; AVX-NEXT: vmovaps %ymm1, 448(%rsi)
11495 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11496 ; AVX-NEXT: vmovaps %ymm1, 384(%rsi)
11497 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11498 ; AVX-NEXT: vmovaps %ymm1, 320(%rsi)
11499 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11500 ; AVX-NEXT: vmovaps %ymm1, 256(%rsi)
11501 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11502 ; AVX-NEXT: vmovaps %ymm1, 192(%rsi)
11503 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11504 ; AVX-NEXT: vmovaps %ymm1, 128(%rsi)
11505 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11506 ; AVX-NEXT: vmovaps %ymm1, 64(%rsi)
11507 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11508 ; AVX-NEXT: vmovaps %ymm1, (%rsi)
11509 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11510 ; AVX-NEXT: vmovaps %ymm1, 480(%rsi)
11511 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11512 ; AVX-NEXT: vmovaps %ymm1, 416(%rsi)
11513 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11514 ; AVX-NEXT: vmovaps %ymm1, 352(%rsi)
11515 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11516 ; AVX-NEXT: vmovaps %ymm1, 288(%rsi)
11517 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11518 ; AVX-NEXT: vmovaps %ymm1, 224(%rsi)
11519 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11520 ; AVX-NEXT: vmovaps %ymm1, 160(%rsi)
11521 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11522 ; AVX-NEXT: vmovaps %ymm1, 96(%rsi)
11523 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11524 ; AVX-NEXT: vmovaps %ymm1, 32(%rsi)
11525 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11526 ; AVX-NEXT: vmovaps %ymm1, 448(%rdx)
11527 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11528 ; AVX-NEXT: vmovaps %ymm1, 384(%rdx)
11529 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11530 ; AVX-NEXT: vmovaps %ymm1, 320(%rdx)
11531 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11532 ; AVX-NEXT: vmovaps %ymm1, 256(%rdx)
11533 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11534 ; AVX-NEXT: vmovaps %ymm1, 192(%rdx)
11535 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11536 ; AVX-NEXT: vmovaps %ymm1, 128(%rdx)
11537 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11538 ; AVX-NEXT: vmovaps %ymm1, 64(%rdx)
11539 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11540 ; AVX-NEXT: vmovaps %ymm1, (%rdx)
11541 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11542 ; AVX-NEXT: vmovaps %ymm1, 480(%rdx)
11543 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11544 ; AVX-NEXT: vmovaps %ymm1, 416(%rdx)
11545 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11546 ; AVX-NEXT: vmovaps %ymm1, 352(%rdx)
11547 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11548 ; AVX-NEXT: vmovaps %ymm1, 288(%rdx)
11549 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11550 ; AVX-NEXT: vmovaps %ymm1, 224(%rdx)
11551 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11552 ; AVX-NEXT: vmovaps %ymm1, 160(%rdx)
11553 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11554 ; AVX-NEXT: vmovaps %ymm1, 96(%rdx)
11555 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11556 ; AVX-NEXT: vmovaps %ymm1, 32(%rdx)
11557 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11558 ; AVX-NEXT: vmovaps %ymm1, (%rcx)
11559 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11560 ; AVX-NEXT: vmovaps %ymm1, 64(%rcx)
11561 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11562 ; AVX-NEXT: vmovaps %ymm1, 128(%rcx)
11563 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11564 ; AVX-NEXT: vmovaps %ymm1, 192(%rcx)
11565 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11566 ; AVX-NEXT: vmovaps %ymm1, 256(%rcx)
11567 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11568 ; AVX-NEXT: vmovaps %ymm1, 320(%rcx)
11569 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11570 ; AVX-NEXT: vmovaps %ymm1, 384(%rcx)
11571 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11572 ; AVX-NEXT: vmovaps %ymm1, 448(%rcx)
11573 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11574 ; AVX-NEXT: vmovaps %ymm1, 480(%rcx)
11575 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11576 ; AVX-NEXT: vmovaps %ymm1, 416(%rcx)
11577 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11578 ; AVX-NEXT: vmovaps %ymm1, 352(%rcx)
11579 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11580 ; AVX-NEXT: vmovaps %ymm1, 288(%rcx)
11581 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11582 ; AVX-NEXT: vmovaps %ymm1, 224(%rcx)
11583 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11584 ; AVX-NEXT: vmovaps %ymm1, 160(%rcx)
11585 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11586 ; AVX-NEXT: vmovaps %ymm1, 96(%rcx)
11587 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11588 ; AVX-NEXT: vmovaps %ymm1, 32(%rcx)
11589 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11590 ; AVX-NEXT: vmovaps %ymm1, 480(%r8)
11591 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11592 ; AVX-NEXT: vmovaps %ymm1, 448(%r8)
11593 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11594 ; AVX-NEXT: vmovaps %ymm1, 416(%r8)
11595 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11596 ; AVX-NEXT: vmovaps %ymm1, 384(%r8)
11597 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11598 ; AVX-NEXT: vmovaps %ymm1, 352(%r8)
11599 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11600 ; AVX-NEXT: vmovaps %ymm1, 320(%r8)
11601 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11602 ; AVX-NEXT: vmovaps %ymm1, 288(%r8)
11603 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11604 ; AVX-NEXT: vmovaps %ymm1, 256(%r8)
11605 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11606 ; AVX-NEXT: vmovaps %ymm1, 224(%r8)
11607 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11608 ; AVX-NEXT: vmovaps %ymm1, 192(%r8)
11609 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11610 ; AVX-NEXT: vmovaps %ymm1, 160(%r8)
11611 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11612 ; AVX-NEXT: vmovaps %ymm1, 128(%r8)
11613 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11614 ; AVX-NEXT: vmovaps %ymm1, 96(%r8)
11615 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11616 ; AVX-NEXT: vmovaps %ymm1, 64(%r8)
11617 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11618 ; AVX-NEXT: vmovaps %ymm1, 32(%r8)
11619 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11620 ; AVX-NEXT: vmovaps %ymm1, (%r8)
11621 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11622 ; AVX-NEXT: vmovaps %ymm1, 480(%r9)
11623 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11624 ; AVX-NEXT: vmovaps %ymm1, 448(%r9)
11625 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11626 ; AVX-NEXT: vmovaps %ymm1, 416(%r9)
11627 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11628 ; AVX-NEXT: vmovaps %ymm1, 384(%r9)
11629 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11630 ; AVX-NEXT: vmovaps %ymm1, 352(%r9)
11631 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11632 ; AVX-NEXT: vmovaps %ymm1, 320(%r9)
11633 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11634 ; AVX-NEXT: vmovaps %ymm1, 288(%r9)
11635 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11636 ; AVX-NEXT: vmovaps %ymm1, 256(%r9)
11637 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11638 ; AVX-NEXT: vmovaps %ymm1, 224(%r9)
11639 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11640 ; AVX-NEXT: vmovaps %ymm1, 192(%r9)
11641 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11642 ; AVX-NEXT: vmovaps %ymm1, 160(%r9)
11643 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11644 ; AVX-NEXT: vmovaps %ymm1, 128(%r9)
11645 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11646 ; AVX-NEXT: vmovaps %ymm1, 96(%r9)
11647 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11648 ; AVX-NEXT: vmovaps %ymm1, 64(%r9)
11649 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11650 ; AVX-NEXT: vmovaps %ymm1, 32(%r9)
11651 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11652 ; AVX-NEXT: vmovaps %ymm1, (%r9)
11653 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
11654 ; AVX-NEXT: vmovaps %ymm4, 480(%rax)
11655 ; AVX-NEXT: vmovaps %ymm5, 448(%rax)
11656 ; AVX-NEXT: vmovaps %ymm0, 416(%rax)
11657 ; AVX-NEXT: vmovaps %ymm13, 384(%rax)
11658 ; AVX-NEXT: vmovaps %ymm15, 352(%rax)
11659 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11660 ; AVX-NEXT: vmovaps %ymm0, 320(%rax)
11661 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11662 ; AVX-NEXT: vmovaps %ymm0, 288(%rax)
11663 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11664 ; AVX-NEXT: vmovaps %ymm0, 256(%rax)
11665 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
11666 ; AVX-NEXT: vmovaps %ymm0, 224(%rax)
11667 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11668 ; AVX-NEXT: vmovaps %ymm0, 192(%rax)
11669 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11670 ; AVX-NEXT: vmovaps %ymm0, 160(%rax)
11671 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11672 ; AVX-NEXT: vmovaps %ymm0, 128(%rax)
11673 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11674 ; AVX-NEXT: vmovaps %ymm0, 96(%rax)
11675 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11676 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
11677 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11678 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
11679 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11680 ; AVX-NEXT: vmovaps %ymm0, (%rax)
11681 ; AVX-NEXT: addq $3768, %rsp # imm = 0xEB8
11682 ; AVX-NEXT: vzeroupper
11685 ; AVX2-LABEL: load_i64_stride6_vf64:
11687 ; AVX2-NEXT: subq $3432, %rsp # imm = 0xD68
11688 ; AVX2-NEXT: vmovaps 1088(%rdi), %ymm2
11689 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11690 ; AVX2-NEXT: vmovaps 1056(%rdi), %ymm4
11691 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11692 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm3
11693 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11694 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm5
11695 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11696 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm1
11697 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11698 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm6
11699 ; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11700 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm7
11701 ; AVX2-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11702 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm0
11703 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11704 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
11705 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
11706 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11707 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11708 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11709 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm1
11710 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11711 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
11712 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11713 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
11714 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
11715 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11716 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11717 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11718 ; AVX2-NEXT: vmovaps 1008(%rdi), %xmm1
11719 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11720 ; AVX2-NEXT: vmovaps 960(%rdi), %xmm0
11721 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11722 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
11723 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
11724 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11725 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11726 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11727 ; AVX2-NEXT: vmovaps 1472(%rdi), %ymm0
11728 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11729 ; AVX2-NEXT: vmovaps 1440(%rdi), %ymm1
11730 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11731 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11732 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11733 ; AVX2-NEXT: vmovaps 1392(%rdi), %xmm2
11734 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11735 ; AVX2-NEXT: vmovaps 1344(%rdi), %xmm1
11736 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11737 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11738 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11739 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11740 ; AVX2-NEXT: vmovaps 1856(%rdi), %ymm0
11741 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11742 ; AVX2-NEXT: vmovaps 1824(%rdi), %ymm1
11743 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11744 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11745 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11746 ; AVX2-NEXT: vmovaps 1776(%rdi), %xmm2
11747 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11748 ; AVX2-NEXT: vmovaps 1728(%rdi), %xmm1
11749 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11750 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11751 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11752 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11753 ; AVX2-NEXT: vmovaps 2240(%rdi), %ymm0
11754 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11755 ; AVX2-NEXT: vmovaps 2208(%rdi), %ymm1
11756 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11757 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11758 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11759 ; AVX2-NEXT: vmovaps 2160(%rdi), %xmm2
11760 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11761 ; AVX2-NEXT: vmovaps 2112(%rdi), %xmm1
11762 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11763 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11764 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11765 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11766 ; AVX2-NEXT: vmovaps 2624(%rdi), %ymm0
11767 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11768 ; AVX2-NEXT: vmovaps 2592(%rdi), %ymm1
11769 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11770 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11771 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11772 ; AVX2-NEXT: vmovaps 2544(%rdi), %xmm2
11773 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11774 ; AVX2-NEXT: vmovaps 2496(%rdi), %xmm1
11775 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11776 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11777 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11778 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11779 ; AVX2-NEXT: vmovaps 3008(%rdi), %ymm0
11780 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11781 ; AVX2-NEXT: vmovaps 2976(%rdi), %ymm1
11782 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11783 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11784 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11785 ; AVX2-NEXT: vmovaps 2928(%rdi), %xmm2
11786 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11787 ; AVX2-NEXT: vmovaps 2880(%rdi), %xmm1
11788 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11789 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11790 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11791 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11792 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm0
11793 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11794 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm1
11795 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11796 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11797 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11798 ; AVX2-NEXT: vmovaps (%rdi), %xmm2
11799 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11800 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm1
11801 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11802 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
11803 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11804 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11805 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm0
11806 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11807 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm1
11808 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11809 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11810 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11811 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm1
11812 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11813 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm13
11814 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
11815 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11816 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11817 ; AVX2-NEXT: vmovaps 896(%rdi), %ymm0
11818 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11819 ; AVX2-NEXT: vmovaps 864(%rdi), %ymm1
11820 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11821 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11822 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11823 ; AVX2-NEXT: vmovaps 816(%rdi), %xmm1
11824 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11825 ; AVX2-NEXT: vmovaps 768(%rdi), %xmm11
11826 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
11827 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11828 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11829 ; AVX2-NEXT: vmovaps 1280(%rdi), %ymm0
11830 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11831 ; AVX2-NEXT: vmovaps 1248(%rdi), %ymm1
11832 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11833 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11834 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11835 ; AVX2-NEXT: vmovaps 1200(%rdi), %xmm1
11836 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11837 ; AVX2-NEXT: vmovaps 1152(%rdi), %xmm9
11838 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
11839 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11840 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11841 ; AVX2-NEXT: vmovaps 1664(%rdi), %ymm0
11842 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11843 ; AVX2-NEXT: vmovaps 1632(%rdi), %ymm1
11844 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11845 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11846 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11847 ; AVX2-NEXT: vmovaps 1584(%rdi), %xmm14
11848 ; AVX2-NEXT: vmovaps 1536(%rdi), %xmm7
11849 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
11850 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11851 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11852 ; AVX2-NEXT: vmovaps 2048(%rdi), %ymm12
11853 ; AVX2-NEXT: vmovaps 2016(%rdi), %ymm0
11854 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11855 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
11856 ; AVX2-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11857 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11858 ; AVX2-NEXT: vmovaps 1968(%rdi), %xmm10
11859 ; AVX2-NEXT: vmovaps 1920(%rdi), %xmm5
11860 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
11861 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11862 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11863 ; AVX2-NEXT: vmovaps 2432(%rdi), %ymm8
11864 ; AVX2-NEXT: vmovaps 2400(%rdi), %ymm0
11865 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11866 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
11867 ; AVX2-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11868 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11869 ; AVX2-NEXT: vmovaps 2352(%rdi), %xmm6
11870 ; AVX2-NEXT: vmovaps 2304(%rdi), %xmm3
11871 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
11872 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11873 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11874 ; AVX2-NEXT: vmovaps 2816(%rdi), %ymm4
11875 ; AVX2-NEXT: vmovaps 2784(%rdi), %ymm0
11876 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11877 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
11878 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11879 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11880 ; AVX2-NEXT: vmovaps 2736(%rdi), %xmm2
11881 ; AVX2-NEXT: vmovaps 2688(%rdi), %xmm1
11882 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
11883 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11884 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11885 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11886 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11887 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11888 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm15
11889 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11890 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11891 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11892 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11893 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11894 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11895 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11896 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm15
11897 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11898 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11899 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11900 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11901 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11902 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11903 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11904 ; AVX2-NEXT: vbroadcastsd 1064(%rdi), %ymm15
11905 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11906 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11907 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11908 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11909 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11910 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11911 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11912 ; AVX2-NEXT: vbroadcastsd 1448(%rdi), %ymm15
11913 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11914 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11915 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11916 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11917 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11918 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11919 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11920 ; AVX2-NEXT: vbroadcastsd 1832(%rdi), %ymm15
11921 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11922 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11923 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11924 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11925 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11926 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11927 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11928 ; AVX2-NEXT: vbroadcastsd 2216(%rdi), %ymm15
11929 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11930 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11931 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11932 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11933 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11934 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11935 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11936 ; AVX2-NEXT: vbroadcastsd 2600(%rdi), %ymm15
11937 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11938 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11939 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11940 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11941 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11942 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11943 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11944 ; AVX2-NEXT: vbroadcastsd 2984(%rdi), %ymm15
11945 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11946 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11947 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11948 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11949 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11950 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11951 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11952 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm15
11953 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11954 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11955 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11956 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11957 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
11958 ; AVX2-NEXT: # xmm0 = xmm13[1],mem[1]
11959 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm13
11960 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
11961 ; AVX2-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
11962 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
11963 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11964 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
11965 ; AVX2-NEXT: # xmm0 = xmm11[1],mem[1]
11966 ; AVX2-NEXT: vbroadcastsd 872(%rdi), %ymm11
11967 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
11968 ; AVX2-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
11969 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
11970 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11971 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
11972 ; AVX2-NEXT: # xmm0 = xmm9[1],mem[1]
11973 ; AVX2-NEXT: vbroadcastsd 1256(%rdi), %ymm9
11974 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
11975 ; AVX2-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
11976 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
11977 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11978 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
11979 ; AVX2-NEXT: vbroadcastsd 1640(%rdi), %ymm7
11980 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
11981 ; AVX2-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
11982 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
11983 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11984 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
11985 ; AVX2-NEXT: vbroadcastsd 2024(%rdi), %ymm5
11986 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
11987 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
11988 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11989 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
11990 ; AVX2-NEXT: vbroadcastsd 2408(%rdi), %ymm3
11991 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
11992 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11993 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11994 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
11995 ; AVX2-NEXT: vbroadcastsd 2792(%rdi), %ymm1
11996 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
11997 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11998 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11999 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
12000 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12001 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12002 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm2
12003 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12004 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm1
12005 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12006 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12007 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12008 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12009 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
12010 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12011 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12012 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm2
12013 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12014 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm1
12015 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12016 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12017 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12018 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12019 ; AVX2-NEXT: vbroadcastsd 1120(%rdi), %ymm0
12020 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12021 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12022 ; AVX2-NEXT: vmovaps 1024(%rdi), %xmm2
12023 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12024 ; AVX2-NEXT: vmovaps 976(%rdi), %xmm1
12025 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12026 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12027 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12028 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12029 ; AVX2-NEXT: vbroadcastsd 1504(%rdi), %ymm0
12030 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12031 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12032 ; AVX2-NEXT: vmovaps 1408(%rdi), %xmm2
12033 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12034 ; AVX2-NEXT: vmovaps 1360(%rdi), %xmm1
12035 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12036 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12037 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12038 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12039 ; AVX2-NEXT: vbroadcastsd 1888(%rdi), %ymm0
12040 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12041 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12042 ; AVX2-NEXT: vmovaps 1792(%rdi), %xmm2
12043 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12044 ; AVX2-NEXT: vmovaps 1744(%rdi), %xmm1
12045 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12046 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12047 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12048 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12049 ; AVX2-NEXT: vbroadcastsd 2272(%rdi), %ymm0
12050 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12051 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12052 ; AVX2-NEXT: vmovaps 2176(%rdi), %xmm2
12053 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12054 ; AVX2-NEXT: vmovaps 2128(%rdi), %xmm1
12055 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12056 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12057 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12058 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12059 ; AVX2-NEXT: vbroadcastsd 2656(%rdi), %ymm0
12060 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12061 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12062 ; AVX2-NEXT: vmovaps 2560(%rdi), %xmm2
12063 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12064 ; AVX2-NEXT: vmovaps 2512(%rdi), %xmm1
12065 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12066 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12067 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12068 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12069 ; AVX2-NEXT: vbroadcastsd 3040(%rdi), %ymm0
12070 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12071 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12072 ; AVX2-NEXT: vmovaps 2944(%rdi), %xmm2
12073 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12074 ; AVX2-NEXT: vmovaps 2896(%rdi), %xmm1
12075 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12076 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12077 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12078 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12079 ; AVX2-NEXT: vbroadcastsd 2848(%rdi), %ymm0
12080 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12081 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12082 ; AVX2-NEXT: vmovaps 2752(%rdi), %xmm2
12083 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12084 ; AVX2-NEXT: vmovaps 2704(%rdi), %xmm1
12085 ; AVX2-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
12086 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12087 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12088 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12089 ; AVX2-NEXT: vbroadcastsd 2464(%rdi), %ymm0
12090 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12091 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12092 ; AVX2-NEXT: vmovaps 2368(%rdi), %xmm2
12093 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12094 ; AVX2-NEXT: vmovaps 2320(%rdi), %xmm1
12095 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12096 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12097 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12098 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12099 ; AVX2-NEXT: vbroadcastsd 2080(%rdi), %ymm0
12100 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12101 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12102 ; AVX2-NEXT: vmovaps 1984(%rdi), %xmm2
12103 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12104 ; AVX2-NEXT: vmovaps 1936(%rdi), %xmm1
12105 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12106 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12107 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12108 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12109 ; AVX2-NEXT: vbroadcastsd 1696(%rdi), %ymm0
12110 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12111 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12112 ; AVX2-NEXT: vmovaps 1600(%rdi), %xmm12
12113 ; AVX2-NEXT: vmovaps 1552(%rdi), %xmm11
12114 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
12115 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12116 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12117 ; AVX2-NEXT: vbroadcastsd 1312(%rdi), %ymm0
12118 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
12119 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
12120 ; AVX2-NEXT: vmovaps 1216(%rdi), %xmm9
12121 ; AVX2-NEXT: vmovaps 1168(%rdi), %xmm8
12122 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
12123 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12124 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12125 ; AVX2-NEXT: vbroadcastsd 928(%rdi), %ymm0
12126 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
12127 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
12128 ; AVX2-NEXT: vmovaps 832(%rdi), %xmm6
12129 ; AVX2-NEXT: vmovaps 784(%rdi), %xmm5
12130 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
12131 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
12132 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12133 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm0
12134 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
12135 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
12136 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm13
12137 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm3
12138 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
12139 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12140 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12141 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
12142 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
12143 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
12144 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm15
12145 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm0
12146 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
12147 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
12148 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12149 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
12150 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm14
12151 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
12152 ; AVX2-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12153 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12154 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12155 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12156 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12157 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12158 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12159 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm15
12160 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12161 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm15[1],ymm1[3],ymm15[3]
12162 ; AVX2-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12163 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12164 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12165 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12166 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
12167 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm3
12168 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm3[1],ymm4[3],ymm3[3]
12169 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12170 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12171 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12172 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12173 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12174 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12175 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12176 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm4
12177 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12178 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
12179 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12180 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12181 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12182 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12183 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
12184 ; AVX2-NEXT: vmovaps 928(%rdi), %ymm5
12185 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm5[1],ymm7[3],ymm5[3]
12186 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12187 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12188 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12189 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12190 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12191 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12192 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12193 ; AVX2-NEXT: vmovaps 1120(%rdi), %ymm6
12194 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12195 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm6[1],ymm1[3],ymm6[3]
12196 ; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12197 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12198 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12199 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12200 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
12201 ; AVX2-NEXT: vmovaps 1312(%rdi), %ymm7
12202 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm7[1],ymm10[3],ymm7[3]
12203 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12204 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12205 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12206 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12207 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12208 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12209 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12210 ; AVX2-NEXT: vmovaps 1504(%rdi), %ymm8
12211 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12212 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm8[1],ymm1[3],ymm8[3]
12213 ; AVX2-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12214 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12215 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12216 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12217 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
12218 ; AVX2-NEXT: vmovaps 1696(%rdi), %ymm9
12219 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12220 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
12221 ; AVX2-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12222 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12223 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12224 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12225 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12226 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12227 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12228 ; AVX2-NEXT: vmovaps 1888(%rdi), %ymm10
12229 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12230 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
12231 ; AVX2-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12232 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12233 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12234 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12235 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12236 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12237 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12238 ; AVX2-NEXT: vmovaps 2080(%rdi), %ymm11
12239 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12240 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm11[1],ymm1[3],ymm11[3]
12241 ; AVX2-NEXT: vmovups %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12242 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12243 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12244 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12245 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12246 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12247 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12248 ; AVX2-NEXT: vmovaps 2272(%rdi), %ymm2
12249 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12250 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12251 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12252 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12253 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12254 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12255 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12256 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12257 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12258 ; AVX2-NEXT: vmovaps 2464(%rdi), %ymm2
12259 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12260 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12261 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12262 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12263 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12264 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12265 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12266 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12267 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12268 ; AVX2-NEXT: vmovaps 2656(%rdi), %ymm2
12269 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12270 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12271 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12272 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12273 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12274 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12275 ; AVX2-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
12276 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12277 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12278 ; AVX2-NEXT: vmovaps 2848(%rdi), %ymm2
12279 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12280 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12281 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12282 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12283 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12284 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12285 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12286 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12287 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12288 ; AVX2-NEXT: vmovaps 3040(%rdi), %ymm2
12289 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12290 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12291 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12292 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12293 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12294 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12295 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12296 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
12297 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12298 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
12299 ; AVX2-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12300 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm1
12301 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12302 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
12303 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12304 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12305 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12306 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
12307 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12308 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm12
12309 ; AVX2-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
12310 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
12311 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12312 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
12313 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12314 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12315 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12316 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
12317 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12318 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm3
12319 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12320 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm1
12321 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12322 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12323 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12324 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12325 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12326 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
12327 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12328 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm3
12329 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12330 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm1
12331 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12332 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12333 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12334 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12335 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12336 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
12337 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12338 ; AVX2-NEXT: vmovaps 848(%rdi), %xmm3
12339 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12340 ; AVX2-NEXT: vmovaps 800(%rdi), %xmm1
12341 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12342 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12343 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12344 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12345 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12346 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
12347 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12348 ; AVX2-NEXT: vmovaps 1040(%rdi), %xmm3
12349 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12350 ; AVX2-NEXT: vmovaps 992(%rdi), %xmm1
12351 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12352 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12353 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12354 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12355 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12356 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
12357 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12358 ; AVX2-NEXT: vmovaps 1232(%rdi), %xmm3
12359 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12360 ; AVX2-NEXT: vmovaps 1184(%rdi), %xmm1
12361 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12362 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12363 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12364 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12365 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12366 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
12367 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12368 ; AVX2-NEXT: vmovaps 1424(%rdi), %xmm3
12369 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12370 ; AVX2-NEXT: vmovaps 1376(%rdi), %xmm1
12371 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12372 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12373 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12374 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12375 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12376 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
12377 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12378 ; AVX2-NEXT: vmovaps 1616(%rdi), %xmm3
12379 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12380 ; AVX2-NEXT: vmovaps 1568(%rdi), %xmm1
12381 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12382 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12383 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12384 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12385 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12386 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
12387 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12388 ; AVX2-NEXT: vmovaps 1808(%rdi), %xmm1
12389 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12390 ; AVX2-NEXT: vmovaps 1760(%rdi), %xmm13
12391 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
12392 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12393 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12394 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12395 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
12396 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12397 ; AVX2-NEXT: vmovaps 2000(%rdi), %xmm1
12398 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12399 ; AVX2-NEXT: vmovaps 1952(%rdi), %xmm11
12400 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
12401 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12402 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12403 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12404 ; AVX2-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12405 ; AVX2-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
12406 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12407 ; AVX2-NEXT: vmovaps 2192(%rdi), %xmm1
12408 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12409 ; AVX2-NEXT: vmovaps 2144(%rdi), %xmm9
12410 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
12411 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12412 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12413 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12414 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
12415 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
12416 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12417 ; AVX2-NEXT: vmovaps 2384(%rdi), %xmm12
12418 ; AVX2-NEXT: vmovaps 2336(%rdi), %xmm7
12419 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
12420 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12421 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12422 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12423 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
12424 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
12425 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12426 ; AVX2-NEXT: vmovaps 2576(%rdi), %xmm8
12427 ; AVX2-NEXT: vmovaps 2528(%rdi), %xmm5
12428 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
12429 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12430 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12431 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12432 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
12433 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
12434 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12435 ; AVX2-NEXT: vmovaps 2768(%rdi), %xmm4
12436 ; AVX2-NEXT: vmovaps 2720(%rdi), %xmm3
12437 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
12438 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12439 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12440 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12441 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
12442 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12443 ; AVX2-NEXT: vmovaps 2960(%rdi), %xmm2
12444 ; AVX2-NEXT: vmovaps 2912(%rdi), %xmm1
12445 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
12446 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12447 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12448 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
12449 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12450 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12451 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12452 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12453 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12454 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12455 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12456 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm0
12457 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12458 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12459 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12460 ; AVX2-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
12461 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12462 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12463 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12464 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
12465 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12466 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12467 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12468 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12469 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12470 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12471 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12472 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm0
12473 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12474 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12475 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12476 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12477 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12478 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12479 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12480 ; AVX2-NEXT: vbroadcastsd 904(%rdi), %ymm0
12481 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12482 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12483 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12484 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12485 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12486 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12487 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12488 ; AVX2-NEXT: vbroadcastsd 1096(%rdi), %ymm0
12489 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12490 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12491 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12492 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12493 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12494 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12495 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12496 ; AVX2-NEXT: vbroadcastsd 1288(%rdi), %ymm0
12497 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12498 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12499 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12500 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12501 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12502 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12503 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12504 ; AVX2-NEXT: vbroadcastsd 1480(%rdi), %ymm0
12505 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12506 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12507 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12508 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12509 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12510 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12511 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12512 ; AVX2-NEXT: vbroadcastsd 1672(%rdi), %ymm0
12513 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12514 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12515 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12516 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12517 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12518 ; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12519 ; AVX2-NEXT: vbroadcastsd 1864(%rdi), %ymm0
12520 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12521 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12522 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
12523 ; AVX2-NEXT: # xmm13 = xmm13[1],mem[1]
12524 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
12525 ; AVX2-NEXT: vbroadcastsd 2056(%rdi), %ymm0
12526 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12527 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12528 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
12529 ; AVX2-NEXT: # xmm11 = xmm11[1],mem[1]
12530 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
12531 ; AVX2-NEXT: vbroadcastsd 2248(%rdi), %ymm0
12532 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12533 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12534 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
12535 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
12536 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
12537 ; AVX2-NEXT: vbroadcastsd 2440(%rdi), %ymm0
12538 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
12539 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
12540 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
12541 ; AVX2-NEXT: vbroadcastsd 2632(%rdi), %ymm0
12542 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
12543 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
12544 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
12545 ; AVX2-NEXT: vbroadcastsd 2824(%rdi), %ymm0
12546 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
12547 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
12548 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
12549 ; AVX2-NEXT: vbroadcastsd 3016(%rdi), %ymm0
12550 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12551 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12552 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
12553 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12554 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12555 ; AVX2-NEXT: vmovaps %ymm1, 448(%rsi)
12556 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12557 ; AVX2-NEXT: vmovaps %ymm1, 384(%rsi)
12558 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12559 ; AVX2-NEXT: vmovaps %ymm1, 320(%rsi)
12560 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12561 ; AVX2-NEXT: vmovaps %ymm1, 256(%rsi)
12562 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12563 ; AVX2-NEXT: vmovaps %ymm1, 192(%rsi)
12564 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12565 ; AVX2-NEXT: vmovaps %ymm1, 128(%rsi)
12566 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12567 ; AVX2-NEXT: vmovaps %ymm1, 64(%rsi)
12568 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12569 ; AVX2-NEXT: vmovaps %ymm1, (%rsi)
12570 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12571 ; AVX2-NEXT: vmovaps %ymm1, 480(%rsi)
12572 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12573 ; AVX2-NEXT: vmovaps %ymm1, 416(%rsi)
12574 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12575 ; AVX2-NEXT: vmovaps %ymm1, 352(%rsi)
12576 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12577 ; AVX2-NEXT: vmovaps %ymm1, 288(%rsi)
12578 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12579 ; AVX2-NEXT: vmovaps %ymm1, 224(%rsi)
12580 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12581 ; AVX2-NEXT: vmovaps %ymm1, 160(%rsi)
12582 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12583 ; AVX2-NEXT: vmovaps %ymm1, 96(%rsi)
12584 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12585 ; AVX2-NEXT: vmovaps %ymm1, 32(%rsi)
12586 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12587 ; AVX2-NEXT: vmovaps %ymm1, 448(%rdx)
12588 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12589 ; AVX2-NEXT: vmovaps %ymm1, 384(%rdx)
12590 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12591 ; AVX2-NEXT: vmovaps %ymm1, 320(%rdx)
12592 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12593 ; AVX2-NEXT: vmovaps %ymm1, 256(%rdx)
12594 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12595 ; AVX2-NEXT: vmovaps %ymm1, 192(%rdx)
12596 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12597 ; AVX2-NEXT: vmovaps %ymm1, 128(%rdx)
12598 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12599 ; AVX2-NEXT: vmovaps %ymm1, 64(%rdx)
12600 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12601 ; AVX2-NEXT: vmovaps %ymm1, (%rdx)
12602 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12603 ; AVX2-NEXT: vmovaps %ymm1, 480(%rdx)
12604 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12605 ; AVX2-NEXT: vmovaps %ymm1, 416(%rdx)
12606 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12607 ; AVX2-NEXT: vmovaps %ymm1, 352(%rdx)
12608 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12609 ; AVX2-NEXT: vmovaps %ymm1, 288(%rdx)
12610 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12611 ; AVX2-NEXT: vmovaps %ymm1, 224(%rdx)
12612 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12613 ; AVX2-NEXT: vmovaps %ymm1, 160(%rdx)
12614 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12615 ; AVX2-NEXT: vmovaps %ymm1, 96(%rdx)
12616 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12617 ; AVX2-NEXT: vmovaps %ymm1, 32(%rdx)
12618 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12619 ; AVX2-NEXT: vmovaps %ymm1, (%rcx)
12620 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12621 ; AVX2-NEXT: vmovaps %ymm1, 64(%rcx)
12622 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12623 ; AVX2-NEXT: vmovaps %ymm1, 128(%rcx)
12624 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12625 ; AVX2-NEXT: vmovaps %ymm1, 192(%rcx)
12626 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12627 ; AVX2-NEXT: vmovaps %ymm1, 256(%rcx)
12628 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12629 ; AVX2-NEXT: vmovaps %ymm1, 320(%rcx)
12630 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12631 ; AVX2-NEXT: vmovaps %ymm1, 384(%rcx)
12632 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12633 ; AVX2-NEXT: vmovaps %ymm1, 448(%rcx)
12634 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12635 ; AVX2-NEXT: vmovaps %ymm1, 480(%rcx)
12636 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12637 ; AVX2-NEXT: vmovaps %ymm1, 416(%rcx)
12638 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12639 ; AVX2-NEXT: vmovaps %ymm1, 352(%rcx)
12640 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12641 ; AVX2-NEXT: vmovaps %ymm1, 288(%rcx)
12642 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12643 ; AVX2-NEXT: vmovaps %ymm1, 224(%rcx)
12644 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12645 ; AVX2-NEXT: vmovaps %ymm1, 160(%rcx)
12646 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12647 ; AVX2-NEXT: vmovaps %ymm1, 96(%rcx)
12648 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12649 ; AVX2-NEXT: vmovaps %ymm1, 32(%rcx)
12650 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12651 ; AVX2-NEXT: vmovaps %ymm1, 480(%r8)
12652 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12653 ; AVX2-NEXT: vmovaps %ymm1, 448(%r8)
12654 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12655 ; AVX2-NEXT: vmovaps %ymm1, 416(%r8)
12656 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12657 ; AVX2-NEXT: vmovaps %ymm1, 384(%r8)
12658 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12659 ; AVX2-NEXT: vmovaps %ymm1, 352(%r8)
12660 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12661 ; AVX2-NEXT: vmovaps %ymm1, 320(%r8)
12662 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12663 ; AVX2-NEXT: vmovaps %ymm1, 288(%r8)
12664 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12665 ; AVX2-NEXT: vmovaps %ymm1, 256(%r8)
12666 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12667 ; AVX2-NEXT: vmovaps %ymm1, 224(%r8)
12668 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12669 ; AVX2-NEXT: vmovaps %ymm1, 192(%r8)
12670 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12671 ; AVX2-NEXT: vmovaps %ymm1, 160(%r8)
12672 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12673 ; AVX2-NEXT: vmovaps %ymm1, 128(%r8)
12674 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12675 ; AVX2-NEXT: vmovaps %ymm1, 96(%r8)
12676 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12677 ; AVX2-NEXT: vmovaps %ymm1, 64(%r8)
12678 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12679 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
12680 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12681 ; AVX2-NEXT: vmovaps %ymm1, (%r8)
12682 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12683 ; AVX2-NEXT: vmovaps %ymm1, 480(%r9)
12684 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12685 ; AVX2-NEXT: vmovaps %ymm1, 448(%r9)
12686 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12687 ; AVX2-NEXT: vmovaps %ymm1, 416(%r9)
12688 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12689 ; AVX2-NEXT: vmovaps %ymm1, 384(%r9)
12690 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12691 ; AVX2-NEXT: vmovaps %ymm1, 352(%r9)
12692 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12693 ; AVX2-NEXT: vmovaps %ymm1, 320(%r9)
12694 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12695 ; AVX2-NEXT: vmovaps %ymm1, 288(%r9)
12696 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12697 ; AVX2-NEXT: vmovaps %ymm1, 256(%r9)
12698 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12699 ; AVX2-NEXT: vmovaps %ymm1, 224(%r9)
12700 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12701 ; AVX2-NEXT: vmovaps %ymm1, 192(%r9)
12702 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12703 ; AVX2-NEXT: vmovaps %ymm1, 160(%r9)
12704 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12705 ; AVX2-NEXT: vmovaps %ymm1, 128(%r9)
12706 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12707 ; AVX2-NEXT: vmovaps %ymm1, 96(%r9)
12708 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12709 ; AVX2-NEXT: vmovaps %ymm1, 64(%r9)
12710 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12711 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
12712 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12713 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
12714 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
12715 ; AVX2-NEXT: vmovaps %ymm0, 480(%rax)
12716 ; AVX2-NEXT: vmovaps %ymm3, 448(%rax)
12717 ; AVX2-NEXT: vmovaps %ymm5, 416(%rax)
12718 ; AVX2-NEXT: vmovaps %ymm7, 384(%rax)
12719 ; AVX2-NEXT: vmovaps %ymm9, 352(%rax)
12720 ; AVX2-NEXT: vmovaps %ymm11, 320(%rax)
12721 ; AVX2-NEXT: vmovaps %ymm13, 288(%rax)
12722 ; AVX2-NEXT: vmovaps %ymm15, 256(%rax)
12723 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12724 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
12725 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12726 ; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
12727 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12728 ; AVX2-NEXT: vmovaps %ymm0, 160(%rax)
12729 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12730 ; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
12731 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12732 ; AVX2-NEXT: vmovaps %ymm0, 96(%rax)
12733 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12734 ; AVX2-NEXT: vmovaps %ymm0, 64(%rax)
12735 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12736 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
12737 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12738 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
12739 ; AVX2-NEXT: addq $3432, %rsp # imm = 0xD68
12740 ; AVX2-NEXT: vzeroupper
12743 ; AVX2-FP-LABEL: load_i64_stride6_vf64:
12744 ; AVX2-FP: # %bb.0:
12745 ; AVX2-FP-NEXT: subq $3432, %rsp # imm = 0xD68
12746 ; AVX2-FP-NEXT: vmovaps 1088(%rdi), %ymm2
12747 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12748 ; AVX2-FP-NEXT: vmovaps 1056(%rdi), %ymm4
12749 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12750 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm3
12751 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12752 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm5
12753 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12754 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm1
12755 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12756 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm6
12757 ; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12758 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm7
12759 ; AVX2-FP-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12760 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm0
12761 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12762 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
12763 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
12764 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12765 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12766 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12767 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm1
12768 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12769 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
12770 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12771 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
12772 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
12773 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12774 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12775 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12776 ; AVX2-FP-NEXT: vmovaps 1008(%rdi), %xmm1
12777 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12778 ; AVX2-FP-NEXT: vmovaps 960(%rdi), %xmm0
12779 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12780 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
12781 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
12782 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12783 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12784 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12785 ; AVX2-FP-NEXT: vmovaps 1472(%rdi), %ymm0
12786 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12787 ; AVX2-FP-NEXT: vmovaps 1440(%rdi), %ymm1
12788 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12789 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12790 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12791 ; AVX2-FP-NEXT: vmovaps 1392(%rdi), %xmm2
12792 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12793 ; AVX2-FP-NEXT: vmovaps 1344(%rdi), %xmm1
12794 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12795 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12796 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12797 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12798 ; AVX2-FP-NEXT: vmovaps 1856(%rdi), %ymm0
12799 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12800 ; AVX2-FP-NEXT: vmovaps 1824(%rdi), %ymm1
12801 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12802 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12803 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12804 ; AVX2-FP-NEXT: vmovaps 1776(%rdi), %xmm2
12805 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12806 ; AVX2-FP-NEXT: vmovaps 1728(%rdi), %xmm1
12807 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12808 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12809 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12810 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12811 ; AVX2-FP-NEXT: vmovaps 2240(%rdi), %ymm0
12812 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12813 ; AVX2-FP-NEXT: vmovaps 2208(%rdi), %ymm1
12814 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12815 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12816 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12817 ; AVX2-FP-NEXT: vmovaps 2160(%rdi), %xmm2
12818 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12819 ; AVX2-FP-NEXT: vmovaps 2112(%rdi), %xmm1
12820 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12821 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12822 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12823 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12824 ; AVX2-FP-NEXT: vmovaps 2624(%rdi), %ymm0
12825 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12826 ; AVX2-FP-NEXT: vmovaps 2592(%rdi), %ymm1
12827 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12828 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12829 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12830 ; AVX2-FP-NEXT: vmovaps 2544(%rdi), %xmm2
12831 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12832 ; AVX2-FP-NEXT: vmovaps 2496(%rdi), %xmm1
12833 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12834 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12835 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12836 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12837 ; AVX2-FP-NEXT: vmovaps 3008(%rdi), %ymm0
12838 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12839 ; AVX2-FP-NEXT: vmovaps 2976(%rdi), %ymm1
12840 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12841 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12842 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12843 ; AVX2-FP-NEXT: vmovaps 2928(%rdi), %xmm2
12844 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12845 ; AVX2-FP-NEXT: vmovaps 2880(%rdi), %xmm1
12846 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12847 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12848 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12849 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12850 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm0
12851 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12852 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm1
12853 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12854 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12855 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12856 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm2
12857 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12858 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm1
12859 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12860 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
12861 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12862 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12863 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm0
12864 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12865 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm1
12866 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12867 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12868 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12869 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm1
12870 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12871 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm13
12872 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
12873 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12874 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12875 ; AVX2-FP-NEXT: vmovaps 896(%rdi), %ymm0
12876 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12877 ; AVX2-FP-NEXT: vmovaps 864(%rdi), %ymm1
12878 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12879 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12880 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12881 ; AVX2-FP-NEXT: vmovaps 816(%rdi), %xmm1
12882 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12883 ; AVX2-FP-NEXT: vmovaps 768(%rdi), %xmm11
12884 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
12885 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12886 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12887 ; AVX2-FP-NEXT: vmovaps 1280(%rdi), %ymm0
12888 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12889 ; AVX2-FP-NEXT: vmovaps 1248(%rdi), %ymm1
12890 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12891 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12892 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12893 ; AVX2-FP-NEXT: vmovaps 1200(%rdi), %xmm1
12894 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12895 ; AVX2-FP-NEXT: vmovaps 1152(%rdi), %xmm9
12896 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
12897 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12898 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12899 ; AVX2-FP-NEXT: vmovaps 1664(%rdi), %ymm0
12900 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12901 ; AVX2-FP-NEXT: vmovaps 1632(%rdi), %ymm1
12902 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12903 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12904 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12905 ; AVX2-FP-NEXT: vmovaps 1584(%rdi), %xmm14
12906 ; AVX2-FP-NEXT: vmovaps 1536(%rdi), %xmm7
12907 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
12908 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12909 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12910 ; AVX2-FP-NEXT: vmovaps 2048(%rdi), %ymm12
12911 ; AVX2-FP-NEXT: vmovaps 2016(%rdi), %ymm0
12912 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12913 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
12914 ; AVX2-FP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12915 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12916 ; AVX2-FP-NEXT: vmovaps 1968(%rdi), %xmm10
12917 ; AVX2-FP-NEXT: vmovaps 1920(%rdi), %xmm5
12918 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
12919 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12920 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12921 ; AVX2-FP-NEXT: vmovaps 2432(%rdi), %ymm8
12922 ; AVX2-FP-NEXT: vmovaps 2400(%rdi), %ymm0
12923 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12924 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
12925 ; AVX2-FP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12926 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12927 ; AVX2-FP-NEXT: vmovaps 2352(%rdi), %xmm6
12928 ; AVX2-FP-NEXT: vmovaps 2304(%rdi), %xmm3
12929 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
12930 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12931 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12932 ; AVX2-FP-NEXT: vmovaps 2816(%rdi), %ymm4
12933 ; AVX2-FP-NEXT: vmovaps 2784(%rdi), %ymm0
12934 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12935 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
12936 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12937 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12938 ; AVX2-FP-NEXT: vmovaps 2736(%rdi), %xmm2
12939 ; AVX2-FP-NEXT: vmovaps 2688(%rdi), %xmm1
12940 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
12941 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12942 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12943 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12944 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12945 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12946 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm15
12947 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12948 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12949 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12950 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12951 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12952 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12953 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12954 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm15
12955 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12956 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12957 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12958 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12959 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12960 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12961 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12962 ; AVX2-FP-NEXT: vbroadcastsd 1064(%rdi), %ymm15
12963 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12964 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12965 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12966 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12967 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12968 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12969 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12970 ; AVX2-FP-NEXT: vbroadcastsd 1448(%rdi), %ymm15
12971 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12972 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12973 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12974 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12975 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12976 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12977 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12978 ; AVX2-FP-NEXT: vbroadcastsd 1832(%rdi), %ymm15
12979 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12980 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12981 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12982 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12983 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12984 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12985 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12986 ; AVX2-FP-NEXT: vbroadcastsd 2216(%rdi), %ymm15
12987 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12988 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12989 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12990 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12991 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12992 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12993 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12994 ; AVX2-FP-NEXT: vbroadcastsd 2600(%rdi), %ymm15
12995 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12996 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12997 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12998 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12999 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13000 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13001 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13002 ; AVX2-FP-NEXT: vbroadcastsd 2984(%rdi), %ymm15
13003 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13004 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13005 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13006 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13007 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13008 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13009 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13010 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm15
13011 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13012 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13013 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13014 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13015 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
13016 ; AVX2-FP-NEXT: # xmm0 = xmm13[1],mem[1]
13017 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm13
13018 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
13019 ; AVX2-FP-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
13020 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
13021 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13022 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
13023 ; AVX2-FP-NEXT: # xmm0 = xmm11[1],mem[1]
13024 ; AVX2-FP-NEXT: vbroadcastsd 872(%rdi), %ymm11
13025 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
13026 ; AVX2-FP-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
13027 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
13028 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13029 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
13030 ; AVX2-FP-NEXT: # xmm0 = xmm9[1],mem[1]
13031 ; AVX2-FP-NEXT: vbroadcastsd 1256(%rdi), %ymm9
13032 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
13033 ; AVX2-FP-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
13034 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
13035 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13036 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
13037 ; AVX2-FP-NEXT: vbroadcastsd 1640(%rdi), %ymm7
13038 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
13039 ; AVX2-FP-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
13040 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
13041 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13042 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
13043 ; AVX2-FP-NEXT: vbroadcastsd 2024(%rdi), %ymm5
13044 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
13045 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
13046 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13047 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
13048 ; AVX2-FP-NEXT: vbroadcastsd 2408(%rdi), %ymm3
13049 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
13050 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
13051 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13052 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
13053 ; AVX2-FP-NEXT: vbroadcastsd 2792(%rdi), %ymm1
13054 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
13055 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13056 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13057 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
13058 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13059 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13060 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm2
13061 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13062 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm1
13063 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13064 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13065 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13066 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13067 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
13068 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13069 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13070 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm2
13071 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13072 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm1
13073 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13074 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13075 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13076 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13077 ; AVX2-FP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
13078 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13079 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13080 ; AVX2-FP-NEXT: vmovaps 1024(%rdi), %xmm2
13081 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13082 ; AVX2-FP-NEXT: vmovaps 976(%rdi), %xmm1
13083 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13084 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13085 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13086 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13087 ; AVX2-FP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
13088 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13089 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13090 ; AVX2-FP-NEXT: vmovaps 1408(%rdi), %xmm2
13091 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13092 ; AVX2-FP-NEXT: vmovaps 1360(%rdi), %xmm1
13093 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13094 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13095 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13096 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13097 ; AVX2-FP-NEXT: vbroadcastsd 1888(%rdi), %ymm0
13098 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13099 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13100 ; AVX2-FP-NEXT: vmovaps 1792(%rdi), %xmm2
13101 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13102 ; AVX2-FP-NEXT: vmovaps 1744(%rdi), %xmm1
13103 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13104 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13105 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13106 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13107 ; AVX2-FP-NEXT: vbroadcastsd 2272(%rdi), %ymm0
13108 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13109 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13110 ; AVX2-FP-NEXT: vmovaps 2176(%rdi), %xmm2
13111 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13112 ; AVX2-FP-NEXT: vmovaps 2128(%rdi), %xmm1
13113 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13114 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13115 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13116 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13117 ; AVX2-FP-NEXT: vbroadcastsd 2656(%rdi), %ymm0
13118 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13119 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13120 ; AVX2-FP-NEXT: vmovaps 2560(%rdi), %xmm2
13121 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13122 ; AVX2-FP-NEXT: vmovaps 2512(%rdi), %xmm1
13123 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13124 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13125 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13126 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13127 ; AVX2-FP-NEXT: vbroadcastsd 3040(%rdi), %ymm0
13128 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13129 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13130 ; AVX2-FP-NEXT: vmovaps 2944(%rdi), %xmm2
13131 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13132 ; AVX2-FP-NEXT: vmovaps 2896(%rdi), %xmm1
13133 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13134 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13135 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13136 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13137 ; AVX2-FP-NEXT: vbroadcastsd 2848(%rdi), %ymm0
13138 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13139 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13140 ; AVX2-FP-NEXT: vmovaps 2752(%rdi), %xmm2
13141 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13142 ; AVX2-FP-NEXT: vmovaps 2704(%rdi), %xmm1
13143 ; AVX2-FP-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
13144 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13145 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13146 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13147 ; AVX2-FP-NEXT: vbroadcastsd 2464(%rdi), %ymm0
13148 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13149 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13150 ; AVX2-FP-NEXT: vmovaps 2368(%rdi), %xmm2
13151 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13152 ; AVX2-FP-NEXT: vmovaps 2320(%rdi), %xmm1
13153 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13154 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13155 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13156 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13157 ; AVX2-FP-NEXT: vbroadcastsd 2080(%rdi), %ymm0
13158 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13159 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13160 ; AVX2-FP-NEXT: vmovaps 1984(%rdi), %xmm2
13161 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13162 ; AVX2-FP-NEXT: vmovaps 1936(%rdi), %xmm1
13163 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13164 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13165 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13166 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13167 ; AVX2-FP-NEXT: vbroadcastsd 1696(%rdi), %ymm0
13168 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13169 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13170 ; AVX2-FP-NEXT: vmovaps 1600(%rdi), %xmm12
13171 ; AVX2-FP-NEXT: vmovaps 1552(%rdi), %xmm11
13172 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
13173 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13174 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13175 ; AVX2-FP-NEXT: vbroadcastsd 1312(%rdi), %ymm0
13176 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
13177 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
13178 ; AVX2-FP-NEXT: vmovaps 1216(%rdi), %xmm9
13179 ; AVX2-FP-NEXT: vmovaps 1168(%rdi), %xmm8
13180 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
13181 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13182 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13183 ; AVX2-FP-NEXT: vbroadcastsd 928(%rdi), %ymm0
13184 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
13185 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
13186 ; AVX2-FP-NEXT: vmovaps 832(%rdi), %xmm6
13187 ; AVX2-FP-NEXT: vmovaps 784(%rdi), %xmm5
13188 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
13189 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
13190 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13191 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm0
13192 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
13193 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
13194 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm13
13195 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm3
13196 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
13197 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13198 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13199 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
13200 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
13201 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
13202 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm15
13203 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm0
13204 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
13205 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
13206 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13207 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
13208 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm14
13209 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
13210 ; AVX2-FP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13211 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13212 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13213 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13214 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13215 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13216 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13217 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm15
13218 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13219 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm15[1],ymm1[3],ymm15[3]
13220 ; AVX2-FP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13221 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13222 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13223 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13224 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
13225 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm3
13226 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm3[1],ymm4[3],ymm3[3]
13227 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13228 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13229 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13230 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13231 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13232 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13233 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13234 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm4
13235 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13236 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
13237 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13238 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13239 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13240 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13241 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
13242 ; AVX2-FP-NEXT: vmovaps 928(%rdi), %ymm5
13243 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm5[1],ymm7[3],ymm5[3]
13244 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13245 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13246 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13247 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13248 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13249 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13250 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13251 ; AVX2-FP-NEXT: vmovaps 1120(%rdi), %ymm6
13252 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13253 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm6[1],ymm1[3],ymm6[3]
13254 ; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13255 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13256 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13257 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13258 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
13259 ; AVX2-FP-NEXT: vmovaps 1312(%rdi), %ymm7
13260 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm7[1],ymm10[3],ymm7[3]
13261 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13262 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13263 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13264 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13265 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13266 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13267 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13268 ; AVX2-FP-NEXT: vmovaps 1504(%rdi), %ymm8
13269 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13270 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm8[1],ymm1[3],ymm8[3]
13271 ; AVX2-FP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13272 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13273 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13274 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13275 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
13276 ; AVX2-FP-NEXT: vmovaps 1696(%rdi), %ymm9
13277 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13278 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
13279 ; AVX2-FP-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13280 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13281 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13282 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13283 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13284 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13285 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13286 ; AVX2-FP-NEXT: vmovaps 1888(%rdi), %ymm10
13287 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13288 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
13289 ; AVX2-FP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13290 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13291 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13292 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13293 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13294 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13295 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13296 ; AVX2-FP-NEXT: vmovaps 2080(%rdi), %ymm11
13297 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13298 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm11[1],ymm1[3],ymm11[3]
13299 ; AVX2-FP-NEXT: vmovups %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13300 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13301 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13302 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13303 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13304 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13305 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13306 ; AVX2-FP-NEXT: vmovaps 2272(%rdi), %ymm2
13307 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13308 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13309 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13310 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13311 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13312 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13313 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13314 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13315 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13316 ; AVX2-FP-NEXT: vmovaps 2464(%rdi), %ymm2
13317 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13318 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13319 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13320 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13321 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13322 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13323 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13324 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13325 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13326 ; AVX2-FP-NEXT: vmovaps 2656(%rdi), %ymm2
13327 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13328 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13329 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13330 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13331 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13332 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13333 ; AVX2-FP-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
13334 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13335 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13336 ; AVX2-FP-NEXT: vmovaps 2848(%rdi), %ymm2
13337 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13338 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13339 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13340 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13341 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13342 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13343 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13344 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13345 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13346 ; AVX2-FP-NEXT: vmovaps 3040(%rdi), %ymm2
13347 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13348 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13349 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13350 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13351 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13352 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13353 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13354 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
13355 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13356 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
13357 ; AVX2-FP-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13358 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm1
13359 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13360 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
13361 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13362 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13363 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13364 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
13365 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13366 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm12
13367 ; AVX2-FP-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
13368 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
13369 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13370 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
13371 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13372 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13373 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13374 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
13375 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13376 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm3
13377 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13378 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm1
13379 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13380 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13381 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13382 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13383 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13384 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
13385 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13386 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm3
13387 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13388 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm1
13389 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13390 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13391 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13392 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13393 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13394 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
13395 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13396 ; AVX2-FP-NEXT: vmovaps 848(%rdi), %xmm3
13397 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13398 ; AVX2-FP-NEXT: vmovaps 800(%rdi), %xmm1
13399 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13400 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13401 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13402 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13403 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13404 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
13405 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13406 ; AVX2-FP-NEXT: vmovaps 1040(%rdi), %xmm3
13407 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13408 ; AVX2-FP-NEXT: vmovaps 992(%rdi), %xmm1
13409 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13410 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13411 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13412 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13413 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13414 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
13415 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13416 ; AVX2-FP-NEXT: vmovaps 1232(%rdi), %xmm3
13417 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13418 ; AVX2-FP-NEXT: vmovaps 1184(%rdi), %xmm1
13419 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13420 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13421 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13422 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13423 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13424 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
13425 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13426 ; AVX2-FP-NEXT: vmovaps 1424(%rdi), %xmm3
13427 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13428 ; AVX2-FP-NEXT: vmovaps 1376(%rdi), %xmm1
13429 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13430 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13431 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13432 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13433 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13434 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
13435 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13436 ; AVX2-FP-NEXT: vmovaps 1616(%rdi), %xmm3
13437 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13438 ; AVX2-FP-NEXT: vmovaps 1568(%rdi), %xmm1
13439 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13440 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13441 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13442 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13443 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13444 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
13445 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13446 ; AVX2-FP-NEXT: vmovaps 1808(%rdi), %xmm1
13447 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13448 ; AVX2-FP-NEXT: vmovaps 1760(%rdi), %xmm13
13449 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
13450 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13451 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13452 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13453 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
13454 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13455 ; AVX2-FP-NEXT: vmovaps 2000(%rdi), %xmm1
13456 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13457 ; AVX2-FP-NEXT: vmovaps 1952(%rdi), %xmm11
13458 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
13459 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13460 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13461 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13462 ; AVX2-FP-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13463 ; AVX2-FP-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
13464 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13465 ; AVX2-FP-NEXT: vmovaps 2192(%rdi), %xmm1
13466 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13467 ; AVX2-FP-NEXT: vmovaps 2144(%rdi), %xmm9
13468 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
13469 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13470 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13471 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13472 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
13473 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
13474 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13475 ; AVX2-FP-NEXT: vmovaps 2384(%rdi), %xmm12
13476 ; AVX2-FP-NEXT: vmovaps 2336(%rdi), %xmm7
13477 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
13478 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13479 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13480 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13481 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
13482 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
13483 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13484 ; AVX2-FP-NEXT: vmovaps 2576(%rdi), %xmm8
13485 ; AVX2-FP-NEXT: vmovaps 2528(%rdi), %xmm5
13486 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
13487 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13488 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13489 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13490 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
13491 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
13492 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13493 ; AVX2-FP-NEXT: vmovaps 2768(%rdi), %xmm4
13494 ; AVX2-FP-NEXT: vmovaps 2720(%rdi), %xmm3
13495 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
13496 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13497 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13498 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13499 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
13500 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13501 ; AVX2-FP-NEXT: vmovaps 2960(%rdi), %xmm2
13502 ; AVX2-FP-NEXT: vmovaps 2912(%rdi), %xmm1
13503 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
13504 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13505 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13506 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
13507 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13508 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13509 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13510 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13511 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13512 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13513 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13514 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm0
13515 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13516 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13517 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13518 ; AVX2-FP-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
13519 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13520 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13521 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13522 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
13523 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13524 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13525 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13526 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13527 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13528 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13529 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13530 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm0
13531 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13532 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13533 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13534 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13535 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13536 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13537 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13538 ; AVX2-FP-NEXT: vbroadcastsd 904(%rdi), %ymm0
13539 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13540 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13541 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13542 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13543 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13544 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13545 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13546 ; AVX2-FP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
13547 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13548 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13549 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13550 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13551 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13552 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13553 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13554 ; AVX2-FP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
13555 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13556 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13557 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13558 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13559 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13560 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13561 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13562 ; AVX2-FP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
13563 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13564 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13565 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13566 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13567 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13568 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13569 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13570 ; AVX2-FP-NEXT: vbroadcastsd 1672(%rdi), %ymm0
13571 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13572 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13573 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13574 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13575 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13576 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13577 ; AVX2-FP-NEXT: vbroadcastsd 1864(%rdi), %ymm0
13578 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13579 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13580 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
13581 ; AVX2-FP-NEXT: # xmm13 = xmm13[1],mem[1]
13582 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
13583 ; AVX2-FP-NEXT: vbroadcastsd 2056(%rdi), %ymm0
13584 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13585 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13586 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
13587 ; AVX2-FP-NEXT: # xmm11 = xmm11[1],mem[1]
13588 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
13589 ; AVX2-FP-NEXT: vbroadcastsd 2248(%rdi), %ymm0
13590 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13591 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13592 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
13593 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
13594 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
13595 ; AVX2-FP-NEXT: vbroadcastsd 2440(%rdi), %ymm0
13596 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
13597 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
13598 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
13599 ; AVX2-FP-NEXT: vbroadcastsd 2632(%rdi), %ymm0
13600 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
13601 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
13602 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
13603 ; AVX2-FP-NEXT: vbroadcastsd 2824(%rdi), %ymm0
13604 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
13605 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
13606 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
13607 ; AVX2-FP-NEXT: vbroadcastsd 3016(%rdi), %ymm0
13608 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13609 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13610 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
13611 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13612 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13613 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rsi)
13614 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13615 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rsi)
13616 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13617 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rsi)
13618 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13619 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rsi)
13620 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13621 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rsi)
13622 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13623 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rsi)
13624 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13625 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rsi)
13626 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13627 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rsi)
13628 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13629 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rsi)
13630 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13631 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rsi)
13632 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13633 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rsi)
13634 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13635 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rsi)
13636 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13637 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rsi)
13638 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13639 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rsi)
13640 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13641 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rsi)
13642 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13643 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rsi)
13644 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13645 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rdx)
13646 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13647 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rdx)
13648 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13649 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rdx)
13650 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13651 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rdx)
13652 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13653 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rdx)
13654 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13655 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rdx)
13656 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13657 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rdx)
13658 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13659 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rdx)
13660 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13661 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rdx)
13662 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13663 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rdx)
13664 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13665 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rdx)
13666 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13667 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rdx)
13668 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13669 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rdx)
13670 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13671 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rdx)
13672 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13673 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rdx)
13674 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13675 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rdx)
13676 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13677 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rcx)
13678 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13679 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rcx)
13680 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13681 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rcx)
13682 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13683 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rcx)
13684 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13685 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rcx)
13686 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13687 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rcx)
13688 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13689 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rcx)
13690 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13691 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rcx)
13692 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13693 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rcx)
13694 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13695 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rcx)
13696 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13697 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rcx)
13698 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13699 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rcx)
13700 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13701 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rcx)
13702 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13703 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rcx)
13704 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13705 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rcx)
13706 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13707 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rcx)
13708 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13709 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%r8)
13710 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13711 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%r8)
13712 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13713 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%r8)
13714 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13715 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%r8)
13716 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13717 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%r8)
13718 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13719 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%r8)
13720 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13721 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%r8)
13722 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13723 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%r8)
13724 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13725 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r8)
13726 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13727 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r8)
13728 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13729 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r8)
13730 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13731 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r8)
13732 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13733 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r8)
13734 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13735 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r8)
13736 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13737 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r8)
13738 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13739 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r8)
13740 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13741 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%r9)
13742 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13743 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%r9)
13744 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13745 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%r9)
13746 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13747 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%r9)
13748 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13749 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%r9)
13750 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13751 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%r9)
13752 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13753 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%r9)
13754 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13755 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%r9)
13756 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13757 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r9)
13758 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13759 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r9)
13760 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13761 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r9)
13762 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13763 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r9)
13764 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13765 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r9)
13766 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13767 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r9)
13768 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13769 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
13770 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13771 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
13772 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
13773 ; AVX2-FP-NEXT: vmovaps %ymm0, 480(%rax)
13774 ; AVX2-FP-NEXT: vmovaps %ymm3, 448(%rax)
13775 ; AVX2-FP-NEXT: vmovaps %ymm5, 416(%rax)
13776 ; AVX2-FP-NEXT: vmovaps %ymm7, 384(%rax)
13777 ; AVX2-FP-NEXT: vmovaps %ymm9, 352(%rax)
13778 ; AVX2-FP-NEXT: vmovaps %ymm11, 320(%rax)
13779 ; AVX2-FP-NEXT: vmovaps %ymm13, 288(%rax)
13780 ; AVX2-FP-NEXT: vmovaps %ymm15, 256(%rax)
13781 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13782 ; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
13783 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13784 ; AVX2-FP-NEXT: vmovaps %ymm0, 192(%rax)
13785 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13786 ; AVX2-FP-NEXT: vmovaps %ymm0, 160(%rax)
13787 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13788 ; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
13789 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13790 ; AVX2-FP-NEXT: vmovaps %ymm0, 96(%rax)
13791 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13792 ; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
13793 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13794 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
13795 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13796 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
13797 ; AVX2-FP-NEXT: addq $3432, %rsp # imm = 0xD68
13798 ; AVX2-FP-NEXT: vzeroupper
13799 ; AVX2-FP-NEXT: retq
13801 ; AVX2-FCP-LABEL: load_i64_stride6_vf64:
13802 ; AVX2-FCP: # %bb.0:
13803 ; AVX2-FCP-NEXT: subq $3432, %rsp # imm = 0xD68
13804 ; AVX2-FCP-NEXT: vmovaps 1088(%rdi), %ymm2
13805 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13806 ; AVX2-FCP-NEXT: vmovaps 1056(%rdi), %ymm4
13807 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13808 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm3
13809 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13810 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm5
13811 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13812 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm1
13813 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13814 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm6
13815 ; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13816 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm7
13817 ; AVX2-FCP-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13818 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm0
13819 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13820 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
13821 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
13822 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13823 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13824 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13825 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm1
13826 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13827 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
13828 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13829 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
13830 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
13831 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13832 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13833 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13834 ; AVX2-FCP-NEXT: vmovaps 1008(%rdi), %xmm1
13835 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13836 ; AVX2-FCP-NEXT: vmovaps 960(%rdi), %xmm0
13837 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13838 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
13839 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
13840 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13841 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13842 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13843 ; AVX2-FCP-NEXT: vmovaps 1472(%rdi), %ymm0
13844 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13845 ; AVX2-FCP-NEXT: vmovaps 1440(%rdi), %ymm1
13846 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13847 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13848 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13849 ; AVX2-FCP-NEXT: vmovaps 1392(%rdi), %xmm2
13850 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13851 ; AVX2-FCP-NEXT: vmovaps 1344(%rdi), %xmm1
13852 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13853 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13854 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13855 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13856 ; AVX2-FCP-NEXT: vmovaps 1856(%rdi), %ymm0
13857 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13858 ; AVX2-FCP-NEXT: vmovaps 1824(%rdi), %ymm1
13859 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13860 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13861 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13862 ; AVX2-FCP-NEXT: vmovaps 1776(%rdi), %xmm2
13863 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13864 ; AVX2-FCP-NEXT: vmovaps 1728(%rdi), %xmm1
13865 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13866 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13867 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13868 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13869 ; AVX2-FCP-NEXT: vmovaps 2240(%rdi), %ymm0
13870 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13871 ; AVX2-FCP-NEXT: vmovaps 2208(%rdi), %ymm1
13872 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13873 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13874 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13875 ; AVX2-FCP-NEXT: vmovaps 2160(%rdi), %xmm2
13876 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13877 ; AVX2-FCP-NEXT: vmovaps 2112(%rdi), %xmm1
13878 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13879 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13880 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13881 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13882 ; AVX2-FCP-NEXT: vmovaps 2624(%rdi), %ymm0
13883 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13884 ; AVX2-FCP-NEXT: vmovaps 2592(%rdi), %ymm1
13885 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13886 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13887 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13888 ; AVX2-FCP-NEXT: vmovaps 2544(%rdi), %xmm2
13889 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13890 ; AVX2-FCP-NEXT: vmovaps 2496(%rdi), %xmm1
13891 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13892 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13893 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13894 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13895 ; AVX2-FCP-NEXT: vmovaps 3008(%rdi), %ymm0
13896 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13897 ; AVX2-FCP-NEXT: vmovaps 2976(%rdi), %ymm1
13898 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13899 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13900 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13901 ; AVX2-FCP-NEXT: vmovaps 2928(%rdi), %xmm2
13902 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13903 ; AVX2-FCP-NEXT: vmovaps 2880(%rdi), %xmm1
13904 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13905 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13906 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13907 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13908 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm0
13909 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13910 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm1
13911 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13912 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13913 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13914 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm2
13915 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13916 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm1
13917 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13918 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
13919 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13920 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13921 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm0
13922 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13923 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm1
13924 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13925 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13926 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13927 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm1
13928 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13929 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm13
13930 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
13931 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13932 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13933 ; AVX2-FCP-NEXT: vmovaps 896(%rdi), %ymm0
13934 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13935 ; AVX2-FCP-NEXT: vmovaps 864(%rdi), %ymm1
13936 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13937 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13938 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13939 ; AVX2-FCP-NEXT: vmovaps 816(%rdi), %xmm1
13940 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13941 ; AVX2-FCP-NEXT: vmovaps 768(%rdi), %xmm11
13942 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
13943 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13944 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13945 ; AVX2-FCP-NEXT: vmovaps 1280(%rdi), %ymm0
13946 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13947 ; AVX2-FCP-NEXT: vmovaps 1248(%rdi), %ymm1
13948 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13949 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13950 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13951 ; AVX2-FCP-NEXT: vmovaps 1200(%rdi), %xmm1
13952 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13953 ; AVX2-FCP-NEXT: vmovaps 1152(%rdi), %xmm9
13954 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
13955 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13956 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13957 ; AVX2-FCP-NEXT: vmovaps 1664(%rdi), %ymm0
13958 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13959 ; AVX2-FCP-NEXT: vmovaps 1632(%rdi), %ymm1
13960 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13961 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13962 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13963 ; AVX2-FCP-NEXT: vmovaps 1584(%rdi), %xmm14
13964 ; AVX2-FCP-NEXT: vmovaps 1536(%rdi), %xmm7
13965 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
13966 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13967 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13968 ; AVX2-FCP-NEXT: vmovaps 2048(%rdi), %ymm12
13969 ; AVX2-FCP-NEXT: vmovaps 2016(%rdi), %ymm0
13970 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13971 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
13972 ; AVX2-FCP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13973 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13974 ; AVX2-FCP-NEXT: vmovaps 1968(%rdi), %xmm10
13975 ; AVX2-FCP-NEXT: vmovaps 1920(%rdi), %xmm5
13976 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
13977 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13978 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13979 ; AVX2-FCP-NEXT: vmovaps 2432(%rdi), %ymm8
13980 ; AVX2-FCP-NEXT: vmovaps 2400(%rdi), %ymm0
13981 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13982 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
13983 ; AVX2-FCP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13984 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13985 ; AVX2-FCP-NEXT: vmovaps 2352(%rdi), %xmm6
13986 ; AVX2-FCP-NEXT: vmovaps 2304(%rdi), %xmm3
13987 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
13988 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13989 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13990 ; AVX2-FCP-NEXT: vmovaps 2816(%rdi), %ymm4
13991 ; AVX2-FCP-NEXT: vmovaps 2784(%rdi), %ymm0
13992 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13993 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
13994 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13995 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13996 ; AVX2-FCP-NEXT: vmovaps 2736(%rdi), %xmm2
13997 ; AVX2-FCP-NEXT: vmovaps 2688(%rdi), %xmm1
13998 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
13999 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14000 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14001 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14002 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14003 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14004 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm15
14005 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14006 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14007 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14008 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14009 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14010 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14011 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14012 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm15
14013 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14014 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14015 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14016 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14017 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14018 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14019 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14020 ; AVX2-FCP-NEXT: vbroadcastsd 1064(%rdi), %ymm15
14021 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14022 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14023 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14024 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14025 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14026 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14027 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14028 ; AVX2-FCP-NEXT: vbroadcastsd 1448(%rdi), %ymm15
14029 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14030 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14031 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14032 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14033 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14034 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14035 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14036 ; AVX2-FCP-NEXT: vbroadcastsd 1832(%rdi), %ymm15
14037 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14038 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14039 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14040 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14041 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14042 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14043 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14044 ; AVX2-FCP-NEXT: vbroadcastsd 2216(%rdi), %ymm15
14045 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14046 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14047 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14048 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14049 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14050 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14051 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14052 ; AVX2-FCP-NEXT: vbroadcastsd 2600(%rdi), %ymm15
14053 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14054 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14055 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14056 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14057 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14058 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14059 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14060 ; AVX2-FCP-NEXT: vbroadcastsd 2984(%rdi), %ymm15
14061 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14062 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14063 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14064 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14065 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14066 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14067 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14068 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm15
14069 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14070 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14071 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14072 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14073 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
14074 ; AVX2-FCP-NEXT: # xmm0 = xmm13[1],mem[1]
14075 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm13
14076 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
14077 ; AVX2-FCP-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
14078 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
14079 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14080 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
14081 ; AVX2-FCP-NEXT: # xmm0 = xmm11[1],mem[1]
14082 ; AVX2-FCP-NEXT: vbroadcastsd 872(%rdi), %ymm11
14083 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
14084 ; AVX2-FCP-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
14085 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
14086 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14087 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
14088 ; AVX2-FCP-NEXT: # xmm0 = xmm9[1],mem[1]
14089 ; AVX2-FCP-NEXT: vbroadcastsd 1256(%rdi), %ymm9
14090 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
14091 ; AVX2-FCP-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
14092 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
14093 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14094 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
14095 ; AVX2-FCP-NEXT: vbroadcastsd 1640(%rdi), %ymm7
14096 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
14097 ; AVX2-FCP-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
14098 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
14099 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14100 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
14101 ; AVX2-FCP-NEXT: vbroadcastsd 2024(%rdi), %ymm5
14102 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
14103 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
14104 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14105 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
14106 ; AVX2-FCP-NEXT: vbroadcastsd 2408(%rdi), %ymm3
14107 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
14108 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
14109 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14110 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
14111 ; AVX2-FCP-NEXT: vbroadcastsd 2792(%rdi), %ymm1
14112 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
14113 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14114 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14115 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
14116 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14117 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14118 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm2
14119 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14120 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm1
14121 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14122 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14123 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14124 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14125 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
14126 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14127 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14128 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm2
14129 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14130 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm1
14131 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14132 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14133 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14134 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14135 ; AVX2-FCP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
14136 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14137 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14138 ; AVX2-FCP-NEXT: vmovaps 1024(%rdi), %xmm2
14139 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14140 ; AVX2-FCP-NEXT: vmovaps 976(%rdi), %xmm1
14141 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14142 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14143 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14144 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14145 ; AVX2-FCP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
14146 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14147 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14148 ; AVX2-FCP-NEXT: vmovaps 1408(%rdi), %xmm2
14149 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14150 ; AVX2-FCP-NEXT: vmovaps 1360(%rdi), %xmm1
14151 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14152 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14153 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14154 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14155 ; AVX2-FCP-NEXT: vbroadcastsd 1888(%rdi), %ymm0
14156 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14157 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14158 ; AVX2-FCP-NEXT: vmovaps 1792(%rdi), %xmm2
14159 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14160 ; AVX2-FCP-NEXT: vmovaps 1744(%rdi), %xmm1
14161 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14162 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14163 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14164 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14165 ; AVX2-FCP-NEXT: vbroadcastsd 2272(%rdi), %ymm0
14166 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14167 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14168 ; AVX2-FCP-NEXT: vmovaps 2176(%rdi), %xmm2
14169 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14170 ; AVX2-FCP-NEXT: vmovaps 2128(%rdi), %xmm1
14171 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14172 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14173 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14174 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14175 ; AVX2-FCP-NEXT: vbroadcastsd 2656(%rdi), %ymm0
14176 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14177 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14178 ; AVX2-FCP-NEXT: vmovaps 2560(%rdi), %xmm2
14179 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14180 ; AVX2-FCP-NEXT: vmovaps 2512(%rdi), %xmm1
14181 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14182 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14183 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14184 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14185 ; AVX2-FCP-NEXT: vbroadcastsd 3040(%rdi), %ymm0
14186 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14187 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14188 ; AVX2-FCP-NEXT: vmovaps 2944(%rdi), %xmm2
14189 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14190 ; AVX2-FCP-NEXT: vmovaps 2896(%rdi), %xmm1
14191 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14192 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14193 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14194 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14195 ; AVX2-FCP-NEXT: vbroadcastsd 2848(%rdi), %ymm0
14196 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14197 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14198 ; AVX2-FCP-NEXT: vmovaps 2752(%rdi), %xmm2
14199 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14200 ; AVX2-FCP-NEXT: vmovaps 2704(%rdi), %xmm1
14201 ; AVX2-FCP-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
14202 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14203 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14204 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14205 ; AVX2-FCP-NEXT: vbroadcastsd 2464(%rdi), %ymm0
14206 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14207 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14208 ; AVX2-FCP-NEXT: vmovaps 2368(%rdi), %xmm2
14209 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14210 ; AVX2-FCP-NEXT: vmovaps 2320(%rdi), %xmm1
14211 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14212 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14213 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14214 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14215 ; AVX2-FCP-NEXT: vbroadcastsd 2080(%rdi), %ymm0
14216 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14217 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14218 ; AVX2-FCP-NEXT: vmovaps 1984(%rdi), %xmm2
14219 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14220 ; AVX2-FCP-NEXT: vmovaps 1936(%rdi), %xmm1
14221 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14222 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14223 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14224 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14225 ; AVX2-FCP-NEXT: vbroadcastsd 1696(%rdi), %ymm0
14226 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14227 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14228 ; AVX2-FCP-NEXT: vmovaps 1600(%rdi), %xmm12
14229 ; AVX2-FCP-NEXT: vmovaps 1552(%rdi), %xmm11
14230 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
14231 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14232 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14233 ; AVX2-FCP-NEXT: vbroadcastsd 1312(%rdi), %ymm0
14234 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
14235 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
14236 ; AVX2-FCP-NEXT: vmovaps 1216(%rdi), %xmm9
14237 ; AVX2-FCP-NEXT: vmovaps 1168(%rdi), %xmm8
14238 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
14239 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14240 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14241 ; AVX2-FCP-NEXT: vbroadcastsd 928(%rdi), %ymm0
14242 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
14243 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
14244 ; AVX2-FCP-NEXT: vmovaps 832(%rdi), %xmm6
14245 ; AVX2-FCP-NEXT: vmovaps 784(%rdi), %xmm5
14246 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
14247 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
14248 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14249 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm0
14250 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
14251 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
14252 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm13
14253 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm3
14254 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
14255 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14256 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14257 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
14258 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
14259 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
14260 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm15
14261 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm0
14262 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
14263 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
14264 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14265 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
14266 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm14
14267 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
14268 ; AVX2-FCP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14269 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14270 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14271 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14272 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14273 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14274 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14275 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm15
14276 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14277 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm15[1],ymm1[3],ymm15[3]
14278 ; AVX2-FCP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14279 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14280 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14281 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14282 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
14283 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm3
14284 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm3[1],ymm4[3],ymm3[3]
14285 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14286 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14287 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14288 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14289 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14290 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14291 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14292 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm4
14293 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14294 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
14295 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14296 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14297 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14298 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14299 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
14300 ; AVX2-FCP-NEXT: vmovaps 928(%rdi), %ymm5
14301 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm5[1],ymm7[3],ymm5[3]
14302 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14303 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14304 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14305 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14306 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14307 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14308 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14309 ; AVX2-FCP-NEXT: vmovaps 1120(%rdi), %ymm6
14310 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14311 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm6[1],ymm1[3],ymm6[3]
14312 ; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14313 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14314 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14315 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14316 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
14317 ; AVX2-FCP-NEXT: vmovaps 1312(%rdi), %ymm7
14318 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm7[1],ymm10[3],ymm7[3]
14319 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14320 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14321 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14322 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14323 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14324 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14325 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14326 ; AVX2-FCP-NEXT: vmovaps 1504(%rdi), %ymm8
14327 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14328 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm8[1],ymm1[3],ymm8[3]
14329 ; AVX2-FCP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14330 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14331 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14332 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14333 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
14334 ; AVX2-FCP-NEXT: vmovaps 1696(%rdi), %ymm9
14335 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14336 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
14337 ; AVX2-FCP-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14338 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14339 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14340 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14341 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14342 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14343 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14344 ; AVX2-FCP-NEXT: vmovaps 1888(%rdi), %ymm10
14345 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14346 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
14347 ; AVX2-FCP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14348 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14349 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14350 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14351 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14352 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14353 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14354 ; AVX2-FCP-NEXT: vmovaps 2080(%rdi), %ymm11
14355 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14356 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm11[1],ymm1[3],ymm11[3]
14357 ; AVX2-FCP-NEXT: vmovups %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14358 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14359 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14360 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14361 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14362 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14363 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14364 ; AVX2-FCP-NEXT: vmovaps 2272(%rdi), %ymm2
14365 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14366 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14367 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14368 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14369 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14370 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14371 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14372 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14373 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14374 ; AVX2-FCP-NEXT: vmovaps 2464(%rdi), %ymm2
14375 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14376 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14377 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14378 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14379 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14380 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14381 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14382 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14383 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14384 ; AVX2-FCP-NEXT: vmovaps 2656(%rdi), %ymm2
14385 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14386 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14387 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14388 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14389 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14390 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14391 ; AVX2-FCP-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
14392 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14393 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14394 ; AVX2-FCP-NEXT: vmovaps 2848(%rdi), %ymm2
14395 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14396 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14397 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14398 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14399 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14400 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14401 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14402 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14403 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14404 ; AVX2-FCP-NEXT: vmovaps 3040(%rdi), %ymm2
14405 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14406 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14407 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14408 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14409 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14410 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14411 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14412 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
14413 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14414 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
14415 ; AVX2-FCP-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14416 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm1
14417 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14418 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
14419 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14420 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14421 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14422 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
14423 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14424 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm12
14425 ; AVX2-FCP-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
14426 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
14427 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14428 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
14429 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14430 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14431 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14432 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
14433 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14434 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm3
14435 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14436 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm1
14437 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14438 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14439 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14440 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14441 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14442 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
14443 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14444 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm3
14445 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14446 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm1
14447 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14448 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14449 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14450 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14451 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14452 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
14453 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14454 ; AVX2-FCP-NEXT: vmovaps 848(%rdi), %xmm3
14455 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14456 ; AVX2-FCP-NEXT: vmovaps 800(%rdi), %xmm1
14457 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14458 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14459 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14460 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14461 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14462 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
14463 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14464 ; AVX2-FCP-NEXT: vmovaps 1040(%rdi), %xmm3
14465 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14466 ; AVX2-FCP-NEXT: vmovaps 992(%rdi), %xmm1
14467 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14468 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14469 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14470 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14471 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14472 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
14473 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14474 ; AVX2-FCP-NEXT: vmovaps 1232(%rdi), %xmm3
14475 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14476 ; AVX2-FCP-NEXT: vmovaps 1184(%rdi), %xmm1
14477 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14478 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14479 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14480 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14481 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14482 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
14483 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14484 ; AVX2-FCP-NEXT: vmovaps 1424(%rdi), %xmm3
14485 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14486 ; AVX2-FCP-NEXT: vmovaps 1376(%rdi), %xmm1
14487 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14488 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14489 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14490 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14491 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14492 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
14493 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14494 ; AVX2-FCP-NEXT: vmovaps 1616(%rdi), %xmm3
14495 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14496 ; AVX2-FCP-NEXT: vmovaps 1568(%rdi), %xmm1
14497 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14498 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14499 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14500 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14501 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14502 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
14503 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14504 ; AVX2-FCP-NEXT: vmovaps 1808(%rdi), %xmm1
14505 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14506 ; AVX2-FCP-NEXT: vmovaps 1760(%rdi), %xmm13
14507 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
14508 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14509 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14510 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14511 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
14512 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14513 ; AVX2-FCP-NEXT: vmovaps 2000(%rdi), %xmm1
14514 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14515 ; AVX2-FCP-NEXT: vmovaps 1952(%rdi), %xmm11
14516 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
14517 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14518 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14519 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14520 ; AVX2-FCP-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14521 ; AVX2-FCP-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
14522 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14523 ; AVX2-FCP-NEXT: vmovaps 2192(%rdi), %xmm1
14524 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14525 ; AVX2-FCP-NEXT: vmovaps 2144(%rdi), %xmm9
14526 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
14527 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14528 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14529 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14530 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
14531 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
14532 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14533 ; AVX2-FCP-NEXT: vmovaps 2384(%rdi), %xmm12
14534 ; AVX2-FCP-NEXT: vmovaps 2336(%rdi), %xmm7
14535 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
14536 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14537 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14538 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14539 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
14540 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
14541 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14542 ; AVX2-FCP-NEXT: vmovaps 2576(%rdi), %xmm8
14543 ; AVX2-FCP-NEXT: vmovaps 2528(%rdi), %xmm5
14544 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
14545 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14546 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14547 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14548 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
14549 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
14550 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14551 ; AVX2-FCP-NEXT: vmovaps 2768(%rdi), %xmm4
14552 ; AVX2-FCP-NEXT: vmovaps 2720(%rdi), %xmm3
14553 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
14554 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14555 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14556 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14557 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
14558 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14559 ; AVX2-FCP-NEXT: vmovaps 2960(%rdi), %xmm2
14560 ; AVX2-FCP-NEXT: vmovaps 2912(%rdi), %xmm1
14561 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
14562 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14563 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14564 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
14565 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14566 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14567 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14568 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14569 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14570 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14571 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14572 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm0
14573 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14574 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14575 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14576 ; AVX2-FCP-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
14577 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14578 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14579 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14580 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
14581 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14582 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14583 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14584 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14585 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14586 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14587 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14588 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm0
14589 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14590 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14591 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14592 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14593 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14594 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14595 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14596 ; AVX2-FCP-NEXT: vbroadcastsd 904(%rdi), %ymm0
14597 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14598 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14599 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14600 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14601 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14602 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14603 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14604 ; AVX2-FCP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
14605 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14606 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14607 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14608 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14609 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14610 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14611 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14612 ; AVX2-FCP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
14613 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14614 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14615 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14616 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14617 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14618 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14619 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14620 ; AVX2-FCP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
14621 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14622 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14623 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14624 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14625 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14626 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14627 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14628 ; AVX2-FCP-NEXT: vbroadcastsd 1672(%rdi), %ymm0
14629 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14630 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14631 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14632 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14633 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14634 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14635 ; AVX2-FCP-NEXT: vbroadcastsd 1864(%rdi), %ymm0
14636 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14637 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14638 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
14639 ; AVX2-FCP-NEXT: # xmm13 = xmm13[1],mem[1]
14640 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
14641 ; AVX2-FCP-NEXT: vbroadcastsd 2056(%rdi), %ymm0
14642 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14643 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14644 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
14645 ; AVX2-FCP-NEXT: # xmm11 = xmm11[1],mem[1]
14646 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
14647 ; AVX2-FCP-NEXT: vbroadcastsd 2248(%rdi), %ymm0
14648 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14649 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14650 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
14651 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
14652 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
14653 ; AVX2-FCP-NEXT: vbroadcastsd 2440(%rdi), %ymm0
14654 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
14655 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
14656 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
14657 ; AVX2-FCP-NEXT: vbroadcastsd 2632(%rdi), %ymm0
14658 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
14659 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
14660 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
14661 ; AVX2-FCP-NEXT: vbroadcastsd 2824(%rdi), %ymm0
14662 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
14663 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
14664 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
14665 ; AVX2-FCP-NEXT: vbroadcastsd 3016(%rdi), %ymm0
14666 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14667 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14668 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
14669 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14670 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14671 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rsi)
14672 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14673 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rsi)
14674 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14675 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rsi)
14676 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14677 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rsi)
14678 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14679 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rsi)
14680 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14681 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rsi)
14682 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14683 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rsi)
14684 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14685 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rsi)
14686 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14687 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rsi)
14688 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14689 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rsi)
14690 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14691 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rsi)
14692 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14693 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rsi)
14694 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14695 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rsi)
14696 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14697 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rsi)
14698 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14699 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rsi)
14700 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14701 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rsi)
14702 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14703 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rdx)
14704 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14705 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rdx)
14706 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14707 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rdx)
14708 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14709 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rdx)
14710 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14711 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rdx)
14712 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14713 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rdx)
14714 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14715 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rdx)
14716 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14717 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rdx)
14718 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14719 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rdx)
14720 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14721 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rdx)
14722 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14723 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rdx)
14724 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14725 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rdx)
14726 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14727 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rdx)
14728 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14729 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rdx)
14730 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14731 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rdx)
14732 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14733 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rdx)
14734 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14735 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rcx)
14736 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14737 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rcx)
14738 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14739 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rcx)
14740 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14741 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rcx)
14742 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14743 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rcx)
14744 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14745 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rcx)
14746 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14747 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rcx)
14748 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14749 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rcx)
14750 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14751 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rcx)
14752 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14753 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rcx)
14754 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14755 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rcx)
14756 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14757 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rcx)
14758 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14759 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rcx)
14760 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14761 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rcx)
14762 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14763 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rcx)
14764 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14765 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rcx)
14766 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14767 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%r8)
14768 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14769 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%r8)
14770 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14771 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%r8)
14772 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14773 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%r8)
14774 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14775 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%r8)
14776 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14777 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%r8)
14778 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14779 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%r8)
14780 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14781 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%r8)
14782 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14783 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r8)
14784 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14785 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r8)
14786 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14787 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r8)
14788 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14789 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r8)
14790 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14791 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r8)
14792 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14793 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r8)
14794 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14795 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r8)
14796 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14797 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r8)
14798 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14799 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%r9)
14800 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14801 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%r9)
14802 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14803 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%r9)
14804 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14805 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%r9)
14806 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14807 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%r9)
14808 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14809 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%r9)
14810 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14811 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%r9)
14812 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14813 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%r9)
14814 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14815 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r9)
14816 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14817 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r9)
14818 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14819 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r9)
14820 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14821 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r9)
14822 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14823 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r9)
14824 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14825 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r9)
14826 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14827 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
14828 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14829 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
14830 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
14831 ; AVX2-FCP-NEXT: vmovaps %ymm0, 480(%rax)
14832 ; AVX2-FCP-NEXT: vmovaps %ymm3, 448(%rax)
14833 ; AVX2-FCP-NEXT: vmovaps %ymm5, 416(%rax)
14834 ; AVX2-FCP-NEXT: vmovaps %ymm7, 384(%rax)
14835 ; AVX2-FCP-NEXT: vmovaps %ymm9, 352(%rax)
14836 ; AVX2-FCP-NEXT: vmovaps %ymm11, 320(%rax)
14837 ; AVX2-FCP-NEXT: vmovaps %ymm13, 288(%rax)
14838 ; AVX2-FCP-NEXT: vmovaps %ymm15, 256(%rax)
14839 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14840 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
14841 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14842 ; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%rax)
14843 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14844 ; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%rax)
14845 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14846 ; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
14847 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14848 ; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%rax)
14849 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14850 ; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
14851 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14852 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
14853 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14854 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
14855 ; AVX2-FCP-NEXT: addq $3432, %rsp # imm = 0xD68
14856 ; AVX2-FCP-NEXT: vzeroupper
14857 ; AVX2-FCP-NEXT: retq
14859 ; AVX512-LABEL: load_i64_stride6_vf64:
14861 ; AVX512-NEXT: subq $7240, %rsp # imm = 0x1C48
14862 ; AVX512-NEXT: vmovdqa64 2048(%rdi), %zmm3
14863 ; AVX512-NEXT: vmovdqa64 1280(%rdi), %zmm4
14864 ; AVX512-NEXT: vmovdqa64 1344(%rdi), %zmm0
14865 ; AVX512-NEXT: vmovdqa64 896(%rdi), %zmm5
14866 ; AVX512-NEXT: vmovdqa64 960(%rdi), %zmm26
14867 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm2
14868 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm1
14869 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm6
14870 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm29
14871 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
14872 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
14873 ; AVX512-NEXT: vmovdqa64 %zmm29, %zmm8
14874 ; AVX512-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
14875 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14876 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm8
14877 ; AVX512-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
14878 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14879 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm8
14880 ; AVX512-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
14881 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14882 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
14883 ; AVX512-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
14884 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14885 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
14886 ; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
14887 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm9
14888 ; AVX512-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
14889 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14890 ; AVX512-NEXT: vmovdqa64 %zmm29, %zmm9
14891 ; AVX512-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
14892 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14893 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9
14894 ; AVX512-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
14895 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14896 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm9
14897 ; AVX512-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
14898 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14899 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
14900 ; AVX512-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14901 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm10
14902 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
14903 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14904 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm10
14905 ; AVX512-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
14906 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14907 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm10
14908 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
14909 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14910 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm10
14911 ; AVX512-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
14912 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14913 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
14914 ; AVX512-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14915 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm11
14916 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
14917 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14918 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm11
14919 ; AVX512-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
14920 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14921 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm11
14922 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
14923 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14924 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm11
14925 ; AVX512-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
14926 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14927 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
14928 ; AVX512-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
14929 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm13
14930 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm12
14931 ; AVX512-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
14932 ; AVX512-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14933 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
14934 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14935 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
14936 ; AVX512-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14937 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm1
14938 ; AVX512-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
14939 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14940 ; AVX512-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
14941 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14942 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm1
14943 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
14944 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14945 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
14946 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14947 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
14948 ; AVX512-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
14949 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14950 ; AVX512-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
14951 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14952 ; AVX512-NEXT: vmovdqa64 2112(%rdi), %zmm0
14953 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
14954 ; AVX512-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
14955 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14956 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
14957 ; AVX512-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
14958 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14959 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
14960 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
14961 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14962 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
14963 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
14964 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14965 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
14966 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
14967 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14968 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
14969 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14970 ; AVX512-NEXT: vmovdqa64 1664(%rdi), %zmm1
14971 ; AVX512-NEXT: vmovdqa64 1728(%rdi), %zmm0
14972 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm3
14973 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
14974 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14975 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm3
14976 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
14977 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14978 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
14979 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
14980 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14981 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
14982 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
14983 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14984 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
14985 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
14986 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14987 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
14988 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14989 ; AVX512-NEXT: vmovdqa64 2432(%rdi), %zmm3
14990 ; AVX512-NEXT: vmovdqa64 2496(%rdi), %zmm0
14991 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
14992 ; AVX512-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
14993 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14994 ; AVX512-NEXT: vmovdqa64 2816(%rdi), %zmm4
14995 ; AVX512-NEXT: vmovdqa64 2880(%rdi), %zmm1
14996 ; AVX512-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
14997 ; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14998 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5
14999 ; AVX512-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
15000 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15001 ; AVX512-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
15002 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15003 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15004 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
15005 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15006 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
15007 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15008 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15009 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
15010 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15011 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
15012 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15013 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15014 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
15015 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15016 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
15017 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15018 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
15019 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15020 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15021 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15022 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm1
15023 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm2
15024 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
15025 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15026 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
15027 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15028 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
15029 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15030 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
15031 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15032 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
15033 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
15034 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
15035 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15036 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
15037 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
15038 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
15039 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15040 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
15041 ; AVX512-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15042 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15043 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
15044 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15045 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
15046 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15047 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15048 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15049 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm2
15050 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
15051 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15052 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15053 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15054 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15055 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15056 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15057 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15058 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15059 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15060 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15061 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15062 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15063 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15064 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15065 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15066 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15067 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15068 ; AVX512-NEXT: vmovdqa64 1216(%rdi), %zmm1
15069 ; AVX512-NEXT: vmovdqa64 1152(%rdi), %zmm2
15070 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15071 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15072 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15073 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15074 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15075 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15076 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15077 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15078 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15079 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15080 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15081 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15082 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15083 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15084 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15085 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15086 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15087 ; AVX512-NEXT: vmovdqa64 832(%rdi), %zmm1
15088 ; AVX512-NEXT: vmovdqa64 768(%rdi), %zmm2
15089 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15090 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15091 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15092 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15093 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15094 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15095 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15096 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15097 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15098 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15099 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15100 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15101 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15102 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15103 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15104 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15105 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15106 ; AVX512-NEXT: vmovdqa64 1984(%rdi), %zmm1
15107 ; AVX512-NEXT: vmovdqa64 1920(%rdi), %zmm2
15108 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15109 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15110 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15111 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15112 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15113 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15114 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15115 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15116 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15117 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15118 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15119 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15120 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15121 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15122 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15123 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15124 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15125 ; AVX512-NEXT: vmovdqa64 1600(%rdi), %zmm1
15126 ; AVX512-NEXT: vmovdqa64 1536(%rdi), %zmm30
15127 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15128 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
15129 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15130 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15131 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
15132 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15133 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm2
15134 ; AVX512-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
15135 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15136 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm2
15137 ; AVX512-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
15138 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15139 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15140 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
15141 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15142 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
15143 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm26
15144 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm18
15145 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm1
15146 ; AVX512-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
15147 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15148 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm21
15149 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm13
15150 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm1
15151 ; AVX512-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
15152 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15153 ; AVX512-NEXT: vmovdqa64 1088(%rdi), %zmm24
15154 ; AVX512-NEXT: vmovdqa64 1024(%rdi), %zmm19
15155 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm1
15156 ; AVX512-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
15157 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15158 ; AVX512-NEXT: vmovdqa64 1472(%rdi), %zmm28
15159 ; AVX512-NEXT: vmovdqa64 1408(%rdi), %zmm16
15160 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm1
15161 ; AVX512-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
15162 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15163 ; AVX512-NEXT: vmovdqa64 1856(%rdi), %zmm25
15164 ; AVX512-NEXT: vmovdqa64 1792(%rdi), %zmm7
15165 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1
15166 ; AVX512-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
15167 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15168 ; AVX512-NEXT: vmovdqa64 2240(%rdi), %zmm23
15169 ; AVX512-NEXT: vmovdqa64 2176(%rdi), %zmm17
15170 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm1
15171 ; AVX512-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
15172 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15173 ; AVX512-NEXT: vmovdqa64 2624(%rdi), %zmm20
15174 ; AVX512-NEXT: vmovdqa64 2560(%rdi), %zmm5
15175 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
15176 ; AVX512-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
15177 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15178 ; AVX512-NEXT: vmovdqa64 2368(%rdi), %zmm1
15179 ; AVX512-NEXT: vmovdqa64 2304(%rdi), %zmm22
15180 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm29
15181 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
15182 ; AVX512-NEXT: vmovdqa64 3008(%rdi), %zmm14
15183 ; AVX512-NEXT: vmovdqa64 2944(%rdi), %zmm31
15184 ; AVX512-NEXT: vmovdqa64 %zmm31, %zmm2
15185 ; AVX512-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
15186 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15187 ; AVX512-NEXT: vmovdqa64 2752(%rdi), %zmm2
15188 ; AVX512-NEXT: vmovdqa64 2688(%rdi), %zmm12
15189 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
15190 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm3
15191 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15192 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15193 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
15194 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15195 ; AVX512-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
15196 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15197 ; AVX512-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
15198 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15199 ; AVX512-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
15200 ; AVX512-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
15201 ; AVX512-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
15202 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm3
15203 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15204 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15205 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
15206 ; AVX512-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
15207 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
15208 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
15209 ; AVX512-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
15210 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15211 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm1
15212 ; AVX512-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
15213 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15214 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm1
15215 ; AVX512-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
15216 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15217 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm1
15218 ; AVX512-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
15219 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15220 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm1
15221 ; AVX512-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
15222 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15223 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm1
15224 ; AVX512-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
15225 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15226 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1
15227 ; AVX512-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
15228 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15229 ; AVX512-NEXT: vmovdqa64 %zmm31, %zmm1
15230 ; AVX512-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
15231 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15232 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
15233 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
15234 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm0
15235 ; AVX512-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
15236 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15237 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
15238 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
15239 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm0
15240 ; AVX512-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
15241 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15242 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
15243 ; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
15244 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm0
15245 ; AVX512-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
15246 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15247 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
15248 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
15249 ; AVX512-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
15250 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm6
15251 ; AVX512-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
15252 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15253 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm6
15254 ; AVX512-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
15255 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15256 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm6
15257 ; AVX512-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
15258 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15259 ; AVX512-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
15260 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm27
15261 ; AVX512-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
15262 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm6
15263 ; AVX512-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
15264 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15265 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm6
15266 ; AVX512-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
15267 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15268 ; AVX512-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
15269 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm18
15270 ; AVX512-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
15271 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm6
15272 ; AVX512-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
15273 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15274 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm6
15275 ; AVX512-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
15276 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15277 ; AVX512-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
15278 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm15
15279 ; AVX512-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
15280 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm19
15281 ; AVX512-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
15282 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm6
15283 ; AVX512-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
15284 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15285 ; AVX512-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
15286 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm13
15287 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
15288 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm17
15289 ; AVX512-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
15290 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm6
15291 ; AVX512-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
15292 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15293 ; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
15294 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm6
15295 ; AVX512-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
15296 ; AVX512-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
15297 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm7
15298 ; AVX512-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
15299 ; AVX512-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
15300 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm16
15301 ; AVX512-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
15302 ; AVX512-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
15303 ; AVX512-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
15304 ; AVX512-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
15305 ; AVX512-NEXT: movb $56, %al
15306 ; AVX512-NEXT: kmovw %eax, %k1
15307 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15308 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15309 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15310 ; AVX512-NEXT: movb $-64, %al
15311 ; AVX512-NEXT: kmovw %eax, %k2
15312 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15313 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15314 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15315 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15316 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15317 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15318 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15319 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15320 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15321 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15322 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15323 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15324 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15325 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15326 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15327 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15328 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15329 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15330 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15331 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15332 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15333 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15334 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15335 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15336 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15337 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15338 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15339 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15340 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15341 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15342 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15343 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15344 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15345 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15346 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
15347 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15348 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
15349 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15350 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
15351 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15352 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
15353 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15354 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15355 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15356 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15357 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15358 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15359 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15360 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15361 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15362 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15363 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15364 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15365 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15366 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15367 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15368 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15369 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15370 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15371 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15372 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15373 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15374 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15375 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15376 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15377 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15378 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15379 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15380 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15381 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15382 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15383 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15384 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15385 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15386 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15387 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15388 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15389 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15390 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15391 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15392 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15393 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15394 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15395 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15396 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
15397 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15398 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
15399 ; AVX512-NEXT: movb $24, %al
15400 ; AVX512-NEXT: kmovw %eax, %k2
15401 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15402 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15403 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15404 ; AVX512-NEXT: movb $-32, %al
15405 ; AVX512-NEXT: kmovw %eax, %k1
15406 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
15407 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15408 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15409 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15410 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15411 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15412 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15413 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15414 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15415 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15416 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15417 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15418 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15419 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15420 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15421 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15422 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15423 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
15424 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15425 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15426 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15427 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15428 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
15429 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15430 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15431 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15432 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15433 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
15434 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15435 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15436 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15437 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15438 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
15439 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15440 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15441 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
15442 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
15443 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15444 ; AVX512-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
15445 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15446 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
15447 ; AVX512-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
15448 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15449 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15450 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15451 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15452 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15453 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15454 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15455 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15456 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15457 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15458 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15459 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15460 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15461 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15462 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15463 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15464 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15465 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15466 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15467 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15468 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15469 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15470 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15471 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15472 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15473 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
15474 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
15475 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
15476 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15477 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
15478 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
15479 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
15480 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15481 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
15482 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
15483 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15484 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
15485 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
15486 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15487 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
15488 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15489 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
15490 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15491 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
15492 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15493 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
15494 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15495 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
15496 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15497 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
15498 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15499 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
15500 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15501 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
15502 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15503 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
15504 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15505 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
15506 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15507 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
15508 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15509 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
15510 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15511 ; AVX512-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
15512 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
15513 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15514 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
15515 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
15516 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
15517 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
15518 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
15519 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
15520 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
15521 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
15522 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
15523 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
15524 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
15525 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
15526 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
15527 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
15528 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
15529 ; AVX512-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
15530 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
15531 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
15532 ; AVX512-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
15533 ; AVX512-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
15534 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
15535 ; AVX512-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
15536 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
15537 ; AVX512-NEXT: vmovdqa64 %zmm8, 448(%rsi)
15538 ; AVX512-NEXT: vmovdqa64 %zmm29, 384(%rsi)
15539 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15540 ; AVX512-NEXT: vmovaps %zmm8, 320(%rsi)
15541 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15542 ; AVX512-NEXT: vmovaps %zmm8, 256(%rsi)
15543 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15544 ; AVX512-NEXT: vmovaps %zmm8, 192(%rsi)
15545 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15546 ; AVX512-NEXT: vmovaps %zmm8, 128(%rsi)
15547 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15548 ; AVX512-NEXT: vmovaps %zmm8, 64(%rsi)
15549 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15550 ; AVX512-NEXT: vmovaps %zmm8, (%rsi)
15551 ; AVX512-NEXT: vmovdqa64 %zmm9, 448(%rdx)
15552 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15553 ; AVX512-NEXT: vmovaps %zmm8, 256(%rdx)
15554 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15555 ; AVX512-NEXT: vmovaps %zmm8, 320(%rdx)
15556 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15557 ; AVX512-NEXT: vmovaps %zmm8, 128(%rdx)
15558 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15559 ; AVX512-NEXT: vmovaps %zmm8, 192(%rdx)
15560 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15561 ; AVX512-NEXT: vmovaps %zmm8, (%rdx)
15562 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15563 ; AVX512-NEXT: vmovaps %zmm8, 64(%rdx)
15564 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15565 ; AVX512-NEXT: vmovaps %zmm8, 384(%rdx)
15566 ; AVX512-NEXT: vmovdqa64 %zmm10, 448(%rcx)
15567 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15568 ; AVX512-NEXT: vmovaps %zmm8, 256(%rcx)
15569 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15570 ; AVX512-NEXT: vmovaps %zmm8, 320(%rcx)
15571 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15572 ; AVX512-NEXT: vmovaps %zmm8, 128(%rcx)
15573 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15574 ; AVX512-NEXT: vmovaps %zmm8, 192(%rcx)
15575 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15576 ; AVX512-NEXT: vmovaps %zmm8, (%rcx)
15577 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15578 ; AVX512-NEXT: vmovaps %zmm8, 64(%rcx)
15579 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15580 ; AVX512-NEXT: vmovaps %zmm8, 384(%rcx)
15581 ; AVX512-NEXT: vmovdqa64 %zmm11, 448(%r8)
15582 ; AVX512-NEXT: vmovdqa64 %zmm27, 256(%r8)
15583 ; AVX512-NEXT: vmovdqa64 %zmm31, 320(%r8)
15584 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15585 ; AVX512-NEXT: vmovaps %zmm8, 128(%r8)
15586 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15587 ; AVX512-NEXT: vmovaps %zmm8, 192(%r8)
15588 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15589 ; AVX512-NEXT: vmovaps %zmm8, (%r8)
15590 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15591 ; AVX512-NEXT: vmovaps %zmm8, 64(%r8)
15592 ; AVX512-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
15593 ; AVX512-NEXT: vmovaps %zmm8, 384(%r8)
15594 ; AVX512-NEXT: vmovdqa64 %zmm4, 448(%r9)
15595 ; AVX512-NEXT: vmovdqa64 %zmm15, 256(%r9)
15596 ; AVX512-NEXT: vmovdqa64 %zmm13, 320(%r9)
15597 ; AVX512-NEXT: vmovdqa64 %zmm7, 128(%r9)
15598 ; AVX512-NEXT: vmovdqa64 %zmm6, 192(%r9)
15599 ; AVX512-NEXT: vmovdqa64 %zmm5, (%r9)
15600 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%r9)
15601 ; AVX512-NEXT: vmovdqa64 %zmm0, 384(%r9)
15602 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
15603 ; AVX512-NEXT: vmovdqa64 %zmm14, 384(%rax)
15604 ; AVX512-NEXT: vmovdqa64 %zmm12, 448(%rax)
15605 ; AVX512-NEXT: vmovdqa64 %zmm19, 256(%rax)
15606 ; AVX512-NEXT: vmovdqa64 %zmm18, 320(%rax)
15607 ; AVX512-NEXT: vmovdqa64 %zmm17, 128(%rax)
15608 ; AVX512-NEXT: vmovdqa64 %zmm16, 192(%rax)
15609 ; AVX512-NEXT: vmovdqa64 %zmm3, (%rax)
15610 ; AVX512-NEXT: vmovdqa64 %zmm2, 64(%rax)
15611 ; AVX512-NEXT: addq $7240, %rsp # imm = 0x1C48
15612 ; AVX512-NEXT: vzeroupper
15613 ; AVX512-NEXT: retq
15615 ; AVX512-FCP-LABEL: load_i64_stride6_vf64:
15616 ; AVX512-FCP: # %bb.0:
15617 ; AVX512-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
15618 ; AVX512-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
15619 ; AVX512-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
15620 ; AVX512-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
15621 ; AVX512-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
15622 ; AVX512-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
15623 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
15624 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
15625 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
15626 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
15627 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
15628 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
15629 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
15630 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
15631 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15632 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
15633 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
15634 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15635 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
15636 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
15637 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15638 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
15639 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
15640 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15641 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
15642 ; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
15643 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
15644 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
15645 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15646 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
15647 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
15648 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15649 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
15650 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
15651 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15652 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
15653 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
15654 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15655 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
15656 ; AVX512-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15657 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
15658 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
15659 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15660 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
15661 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
15662 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15663 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
15664 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
15665 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15666 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
15667 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
15668 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15669 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
15670 ; AVX512-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15671 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
15672 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
15673 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15674 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
15675 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
15676 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15677 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
15678 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
15679 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15680 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
15681 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
15682 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15683 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
15684 ; AVX512-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
15685 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
15686 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
15687 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
15688 ; AVX512-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15689 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
15690 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
15691 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
15692 ; AVX512-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15693 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
15694 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
15695 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15696 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
15697 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15698 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
15699 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
15700 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15701 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
15702 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15703 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
15704 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
15705 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15706 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
15707 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15708 ; AVX512-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
15709 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15710 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15711 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15712 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15713 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
15714 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15715 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15716 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
15717 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15718 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15719 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
15720 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15721 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15722 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
15723 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15724 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15725 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15726 ; AVX512-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
15727 ; AVX512-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
15728 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
15729 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
15730 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15731 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
15732 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15733 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15734 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15735 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
15736 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15737 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15738 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
15739 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15740 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15741 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
15742 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15743 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
15744 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15745 ; AVX512-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
15746 ; AVX512-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
15747 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15748 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15749 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15750 ; AVX512-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
15751 ; AVX512-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
15752 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
15753 ; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15754 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
15755 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
15756 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15757 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
15758 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15759 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15760 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
15761 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15762 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
15763 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15764 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15765 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
15766 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15767 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
15768 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15769 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15770 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
15771 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15772 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
15773 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15774 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
15775 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15776 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15777 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15778 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
15779 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
15780 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
15781 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15782 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
15783 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15784 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
15785 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15786 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
15787 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15788 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
15789 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
15790 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
15791 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15792 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
15793 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
15794 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
15795 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15796 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
15797 ; AVX512-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15798 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15799 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
15800 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15801 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
15802 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15803 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15804 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15805 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
15806 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
15807 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15808 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15809 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15810 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15811 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15812 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15813 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15814 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15815 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15816 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15817 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15818 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15819 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15820 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15821 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15822 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15823 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15824 ; AVX512-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
15825 ; AVX512-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
15826 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15827 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15828 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15829 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15830 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15831 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15832 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15833 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15834 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15835 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15836 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15837 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15838 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15839 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15840 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15841 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15842 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15843 ; AVX512-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
15844 ; AVX512-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
15845 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15846 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15847 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15848 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15849 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15850 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15851 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15852 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15853 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15854 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15855 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15856 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15857 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15858 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15859 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15860 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15861 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15862 ; AVX512-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
15863 ; AVX512-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
15864 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15865 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15866 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15867 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15868 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15869 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15870 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15871 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15872 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15873 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15874 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15875 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15876 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15877 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15878 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15879 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15880 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15881 ; AVX512-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
15882 ; AVX512-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
15883 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15884 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
15885 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15886 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15887 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
15888 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15889 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
15890 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
15891 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15892 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
15893 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
15894 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15895 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15896 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
15897 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15898 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
15899 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
15900 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
15901 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
15902 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
15903 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15904 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
15905 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
15906 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
15907 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
15908 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15909 ; AVX512-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
15910 ; AVX512-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
15911 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
15912 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
15913 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15914 ; AVX512-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
15915 ; AVX512-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
15916 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
15917 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
15918 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15919 ; AVX512-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
15920 ; AVX512-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
15921 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
15922 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
15923 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15924 ; AVX512-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
15925 ; AVX512-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
15926 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
15927 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
15928 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15929 ; AVX512-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
15930 ; AVX512-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
15931 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
15932 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
15933 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15934 ; AVX512-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
15935 ; AVX512-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
15936 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
15937 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
15938 ; AVX512-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
15939 ; AVX512-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
15940 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
15941 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
15942 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15943 ; AVX512-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
15944 ; AVX512-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
15945 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
15946 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
15947 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15948 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15949 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
15950 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15951 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
15952 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15953 ; AVX512-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
15954 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15955 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
15956 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
15957 ; AVX512-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
15958 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
15959 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15960 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15961 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
15962 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
15963 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
15964 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
15965 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
15966 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15967 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
15968 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
15969 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15970 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
15971 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
15972 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15973 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
15974 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
15975 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15976 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
15977 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
15978 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15979 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
15980 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
15981 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15982 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
15983 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
15984 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15985 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
15986 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
15987 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15988 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
15989 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
15990 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
15991 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
15992 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15993 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
15994 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
15995 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
15996 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
15997 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15998 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
15999 ; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
16000 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
16001 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
16002 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16003 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
16004 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16005 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
16006 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
16007 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
16008 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16009 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
16010 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
16011 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16012 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
16013 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
16014 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16015 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
16016 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
16017 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
16018 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
16019 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
16020 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16021 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
16022 ; AVX512-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
16023 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16024 ; AVX512-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
16025 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
16026 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
16027 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
16028 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
16029 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16030 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
16031 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
16032 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16033 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
16034 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
16035 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
16036 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
16037 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
16038 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
16039 ; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
16040 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16041 ; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
16042 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
16043 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
16044 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
16045 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
16046 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
16047 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
16048 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16049 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
16050 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
16051 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
16052 ; AVX512-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
16053 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
16054 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
16055 ; AVX512-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
16056 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
16057 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
16058 ; AVX512-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
16059 ; AVX512-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
16060 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
16061 ; AVX512-FCP-NEXT: movb $56, %al
16062 ; AVX512-FCP-NEXT: kmovw %eax, %k1
16063 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16064 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16065 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16066 ; AVX512-FCP-NEXT: movb $-64, %al
16067 ; AVX512-FCP-NEXT: kmovw %eax, %k2
16068 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16069 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16070 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16071 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16072 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16073 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16074 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16075 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16076 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16077 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16078 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16079 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16080 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16081 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16082 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16083 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16084 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16085 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16086 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16087 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16088 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16089 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16090 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16091 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16092 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16093 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16094 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16095 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16096 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16097 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16098 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16099 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16100 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16101 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16102 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
16103 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16104 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
16105 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16106 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
16107 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16108 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
16109 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16110 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16111 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16112 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16113 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16114 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16115 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16116 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16117 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16118 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16119 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16120 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16121 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16122 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16123 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16124 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16125 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16126 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16127 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16128 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16129 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16130 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16131 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16132 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16133 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16134 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16135 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16136 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16137 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16138 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16139 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16140 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16141 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16142 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16143 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16144 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16145 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16146 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16147 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16148 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16149 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16150 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16151 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16152 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
16153 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16154 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
16155 ; AVX512-FCP-NEXT: movb $24, %al
16156 ; AVX512-FCP-NEXT: kmovw %eax, %k2
16157 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16158 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16159 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16160 ; AVX512-FCP-NEXT: movb $-32, %al
16161 ; AVX512-FCP-NEXT: kmovw %eax, %k1
16162 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
16163 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16164 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16165 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16166 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16167 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16168 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16169 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16170 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16171 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16172 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16173 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16174 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16175 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16176 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16177 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16178 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16179 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
16180 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16181 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16182 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16183 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16184 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
16185 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16186 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16187 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16188 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16189 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
16190 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16191 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16192 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16193 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16194 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
16195 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16196 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16197 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
16198 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
16199 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16200 ; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
16201 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16202 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
16203 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
16204 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16205 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16206 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16207 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16208 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16209 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16210 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16211 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16212 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16213 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16214 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16215 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16216 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16217 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16218 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16219 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16220 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16221 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16222 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16223 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16224 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16225 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16226 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16227 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16228 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16229 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
16230 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
16231 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
16232 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16233 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
16234 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
16235 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
16236 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16237 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
16238 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
16239 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16240 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
16241 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
16242 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16243 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
16244 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16245 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
16246 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16247 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
16248 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16249 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
16250 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16251 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
16252 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16253 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
16254 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16255 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
16256 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16257 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
16258 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16259 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
16260 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16261 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
16262 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16263 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
16264 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16265 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
16266 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16267 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
16268 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
16269 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16270 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
16271 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
16272 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16273 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
16274 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
16275 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16276 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
16277 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
16278 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
16279 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
16280 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
16281 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
16282 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
16283 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
16284 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
16285 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
16286 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
16287 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
16288 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
16289 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
16290 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
16291 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
16292 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
16293 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
16294 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
16295 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16296 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
16297 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16298 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
16299 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16300 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
16301 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16302 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
16303 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16304 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
16305 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16306 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rsi)
16307 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
16308 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16309 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
16310 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16311 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
16312 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16313 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
16314 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16315 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
16316 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16317 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rdx)
16318 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16319 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
16320 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16321 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
16322 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
16323 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16324 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
16325 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16326 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
16327 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16328 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
16329 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16330 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
16331 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16332 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rcx)
16333 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16334 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
16335 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16336 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
16337 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
16338 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
16339 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
16340 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16341 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%r8)
16342 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16343 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%r8)
16344 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16345 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%r8)
16346 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16347 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%r8)
16348 ; AVX512-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
16349 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%r8)
16350 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
16351 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
16352 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
16353 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
16354 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
16355 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
16356 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
16357 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
16358 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
16359 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
16360 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
16361 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
16362 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
16363 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
16364 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
16365 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
16366 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
16367 ; AVX512-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
16368 ; AVX512-FCP-NEXT: vzeroupper
16369 ; AVX512-FCP-NEXT: retq
16371 ; AVX512DQ-LABEL: load_i64_stride6_vf64:
16372 ; AVX512DQ: # %bb.0:
16373 ; AVX512DQ-NEXT: subq $7240, %rsp # imm = 0x1C48
16374 ; AVX512DQ-NEXT: vmovdqa64 2048(%rdi), %zmm3
16375 ; AVX512DQ-NEXT: vmovdqa64 1280(%rdi), %zmm4
16376 ; AVX512DQ-NEXT: vmovdqa64 1344(%rdi), %zmm0
16377 ; AVX512DQ-NEXT: vmovdqa64 896(%rdi), %zmm5
16378 ; AVX512DQ-NEXT: vmovdqa64 960(%rdi), %zmm26
16379 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm2
16380 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm1
16381 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm6
16382 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm29
16383 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
16384 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
16385 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, %zmm8
16386 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
16387 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16388 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm8
16389 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
16390 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16391 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm8
16392 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
16393 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16394 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
16395 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
16396 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16397 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
16398 ; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
16399 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm9
16400 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
16401 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16402 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, %zmm9
16403 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
16404 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16405 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9
16406 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
16407 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16408 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm9
16409 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
16410 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16411 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
16412 ; AVX512DQ-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16413 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm10
16414 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
16415 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16416 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm10
16417 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
16418 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16419 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm10
16420 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
16421 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16422 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm10
16423 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
16424 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16425 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
16426 ; AVX512DQ-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16427 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm11
16428 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
16429 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16430 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm11
16431 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
16432 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16433 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm11
16434 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
16435 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16436 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm11
16437 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
16438 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16439 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
16440 ; AVX512DQ-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
16441 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm13
16442 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm12
16443 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
16444 ; AVX512DQ-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16445 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
16446 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16447 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
16448 ; AVX512DQ-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16449 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm1
16450 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
16451 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16452 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
16453 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16454 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm1
16455 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
16456 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16457 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
16458 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16459 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
16460 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
16461 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16462 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
16463 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16464 ; AVX512DQ-NEXT: vmovdqa64 2112(%rdi), %zmm0
16465 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16466 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
16467 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16468 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16469 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
16470 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16471 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16472 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
16473 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16474 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16475 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
16476 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16477 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16478 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
16479 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16480 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
16481 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16482 ; AVX512DQ-NEXT: vmovdqa64 1664(%rdi), %zmm1
16483 ; AVX512DQ-NEXT: vmovdqa64 1728(%rdi), %zmm0
16484 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm3
16485 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
16486 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16487 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm3
16488 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16489 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16490 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16491 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
16492 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16493 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16494 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
16495 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16496 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16497 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
16498 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16499 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
16500 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16501 ; AVX512DQ-NEXT: vmovdqa64 2432(%rdi), %zmm3
16502 ; AVX512DQ-NEXT: vmovdqa64 2496(%rdi), %zmm0
16503 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16504 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
16505 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16506 ; AVX512DQ-NEXT: vmovdqa64 2816(%rdi), %zmm4
16507 ; AVX512DQ-NEXT: vmovdqa64 2880(%rdi), %zmm1
16508 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
16509 ; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16510 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5
16511 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
16512 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16513 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
16514 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16515 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16516 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
16517 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16518 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
16519 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16520 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16521 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
16522 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16523 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
16524 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16525 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16526 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
16527 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16528 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
16529 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16530 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
16531 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16532 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
16533 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16534 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm1
16535 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm2
16536 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
16537 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16538 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
16539 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16540 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
16541 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16542 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
16543 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16544 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
16545 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
16546 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
16547 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16548 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
16549 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
16550 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
16551 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16552 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
16553 ; AVX512DQ-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16554 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16555 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
16556 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16557 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
16558 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16559 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16560 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16561 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm2
16562 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
16563 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16564 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16565 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16566 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16567 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16568 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16569 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16570 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16571 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16572 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16573 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16574 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16575 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16576 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16577 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16578 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16579 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16580 ; AVX512DQ-NEXT: vmovdqa64 1216(%rdi), %zmm1
16581 ; AVX512DQ-NEXT: vmovdqa64 1152(%rdi), %zmm2
16582 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16583 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16584 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16585 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16586 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16587 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16588 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16589 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16590 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16591 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16592 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16593 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16594 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16595 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16596 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16597 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16598 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16599 ; AVX512DQ-NEXT: vmovdqa64 832(%rdi), %zmm1
16600 ; AVX512DQ-NEXT: vmovdqa64 768(%rdi), %zmm2
16601 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16602 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16603 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16604 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16605 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16606 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16607 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16608 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16609 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16610 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16611 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16612 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16613 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16614 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16615 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16616 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16617 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16618 ; AVX512DQ-NEXT: vmovdqa64 1984(%rdi), %zmm1
16619 ; AVX512DQ-NEXT: vmovdqa64 1920(%rdi), %zmm2
16620 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16621 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16622 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16623 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16624 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16625 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16626 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16627 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16628 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16629 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16630 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16631 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16632 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16633 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16634 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16635 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16636 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16637 ; AVX512DQ-NEXT: vmovdqa64 1600(%rdi), %zmm1
16638 ; AVX512DQ-NEXT: vmovdqa64 1536(%rdi), %zmm30
16639 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16640 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
16641 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16642 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16643 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
16644 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16645 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm2
16646 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
16647 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16648 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm2
16649 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
16650 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16651 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16652 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
16653 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16654 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
16655 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm26
16656 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm18
16657 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm1
16658 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
16659 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16660 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm21
16661 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm13
16662 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm1
16663 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
16664 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16665 ; AVX512DQ-NEXT: vmovdqa64 1088(%rdi), %zmm24
16666 ; AVX512DQ-NEXT: vmovdqa64 1024(%rdi), %zmm19
16667 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm1
16668 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
16669 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16670 ; AVX512DQ-NEXT: vmovdqa64 1472(%rdi), %zmm28
16671 ; AVX512DQ-NEXT: vmovdqa64 1408(%rdi), %zmm16
16672 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm1
16673 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
16674 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16675 ; AVX512DQ-NEXT: vmovdqa64 1856(%rdi), %zmm25
16676 ; AVX512DQ-NEXT: vmovdqa64 1792(%rdi), %zmm7
16677 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1
16678 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
16679 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16680 ; AVX512DQ-NEXT: vmovdqa64 2240(%rdi), %zmm23
16681 ; AVX512DQ-NEXT: vmovdqa64 2176(%rdi), %zmm17
16682 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm1
16683 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
16684 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16685 ; AVX512DQ-NEXT: vmovdqa64 2624(%rdi), %zmm20
16686 ; AVX512DQ-NEXT: vmovdqa64 2560(%rdi), %zmm5
16687 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
16688 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
16689 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16690 ; AVX512DQ-NEXT: vmovdqa64 2368(%rdi), %zmm1
16691 ; AVX512DQ-NEXT: vmovdqa64 2304(%rdi), %zmm22
16692 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm29
16693 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
16694 ; AVX512DQ-NEXT: vmovdqa64 3008(%rdi), %zmm14
16695 ; AVX512DQ-NEXT: vmovdqa64 2944(%rdi), %zmm31
16696 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, %zmm2
16697 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
16698 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16699 ; AVX512DQ-NEXT: vmovdqa64 2752(%rdi), %zmm2
16700 ; AVX512DQ-NEXT: vmovdqa64 2688(%rdi), %zmm12
16701 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
16702 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm3
16703 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16704 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16705 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
16706 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16707 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
16708 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16709 ; AVX512DQ-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
16710 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16711 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
16712 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
16713 ; AVX512DQ-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
16714 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm3
16715 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16716 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16717 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
16718 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
16719 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
16720 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
16721 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
16722 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16723 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm1
16724 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
16725 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16726 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm1
16727 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
16728 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16729 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm1
16730 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
16731 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16732 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm1
16733 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
16734 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16735 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm1
16736 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
16737 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16738 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1
16739 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
16740 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16741 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, %zmm1
16742 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
16743 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16744 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
16745 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
16746 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm0
16747 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
16748 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16749 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
16750 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16751 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm0
16752 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
16753 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16754 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
16755 ; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
16756 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm0
16757 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
16758 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16759 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
16760 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16761 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
16762 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm6
16763 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
16764 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16765 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm6
16766 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
16767 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16768 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm6
16769 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
16770 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16771 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
16772 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm27
16773 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
16774 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm6
16775 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
16776 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16777 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm6
16778 ; AVX512DQ-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
16779 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16780 ; AVX512DQ-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
16781 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm18
16782 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
16783 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm6
16784 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
16785 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16786 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm6
16787 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
16788 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16789 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
16790 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm15
16791 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
16792 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm19
16793 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
16794 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm6
16795 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
16796 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16797 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
16798 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm13
16799 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
16800 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm17
16801 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
16802 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm6
16803 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
16804 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16805 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
16806 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm6
16807 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
16808 ; AVX512DQ-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
16809 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm7
16810 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
16811 ; AVX512DQ-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
16812 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm16
16813 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
16814 ; AVX512DQ-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
16815 ; AVX512DQ-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
16816 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
16817 ; AVX512DQ-NEXT: movb $56, %al
16818 ; AVX512DQ-NEXT: kmovw %eax, %k1
16819 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16820 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16821 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16822 ; AVX512DQ-NEXT: movb $-64, %al
16823 ; AVX512DQ-NEXT: kmovw %eax, %k2
16824 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16825 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16826 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16827 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16828 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16829 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16830 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16831 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16832 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16833 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16834 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16835 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16836 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16837 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16838 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16839 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16840 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16841 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16842 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16843 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16844 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16845 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16846 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16847 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16848 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16849 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16850 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16851 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16852 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16853 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16854 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16855 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16856 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16857 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16858 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
16859 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16860 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
16861 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16862 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
16863 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16864 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
16865 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16866 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16867 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16868 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16869 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16870 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16871 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16872 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16873 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16874 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16875 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16876 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16877 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16878 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16879 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16880 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16881 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16882 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16883 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16884 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16885 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16886 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16887 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16888 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16889 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16890 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16891 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16892 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16893 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16894 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16895 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16896 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16897 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16898 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16899 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16900 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16901 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16902 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16903 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16904 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16905 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16906 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16907 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16908 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
16909 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16910 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
16911 ; AVX512DQ-NEXT: movb $24, %al
16912 ; AVX512DQ-NEXT: kmovw %eax, %k2
16913 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16914 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16915 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16916 ; AVX512DQ-NEXT: movb $-32, %al
16917 ; AVX512DQ-NEXT: kmovw %eax, %k1
16918 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
16919 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16920 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16921 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16922 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16923 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16924 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16925 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16926 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16927 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16928 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16929 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16930 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16931 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16932 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16933 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16934 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16935 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
16936 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16937 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16938 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16939 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16940 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
16941 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16942 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16943 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16944 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16945 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
16946 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16947 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16948 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16949 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16950 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
16951 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16952 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16953 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
16954 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
16955 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16956 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
16957 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16958 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
16959 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
16960 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16961 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16962 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16963 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16964 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16965 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16966 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16967 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16968 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16969 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16970 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16971 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16972 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16973 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16974 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16975 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16976 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16977 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16978 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16979 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16980 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16981 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16982 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16983 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16984 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16985 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
16986 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
16987 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
16988 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16989 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
16990 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
16991 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
16992 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16993 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
16994 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
16995 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16996 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
16997 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
16998 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16999 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
17000 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17001 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
17002 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17003 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
17004 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17005 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
17006 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17007 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
17008 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17009 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
17010 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17011 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
17012 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17013 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
17014 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17015 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
17016 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17017 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
17018 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17019 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
17020 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17021 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
17022 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17023 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
17024 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
17025 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17026 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
17027 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
17028 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17029 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
17030 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
17031 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17032 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
17033 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
17034 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17035 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
17036 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
17037 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
17038 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
17039 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
17040 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
17041 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
17042 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
17043 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
17044 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
17045 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
17046 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
17047 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
17048 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
17049 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 448(%rsi)
17050 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, 384(%rsi)
17051 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17052 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rsi)
17053 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17054 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rsi)
17055 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17056 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rsi)
17057 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17058 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rsi)
17059 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17060 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rsi)
17061 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17062 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rsi)
17063 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 448(%rdx)
17064 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17065 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rdx)
17066 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17067 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rdx)
17068 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17069 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rdx)
17070 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17071 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rdx)
17072 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17073 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rdx)
17074 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17075 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rdx)
17076 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17077 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%rdx)
17078 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 448(%rcx)
17079 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17080 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rcx)
17081 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17082 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rcx)
17083 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17084 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rcx)
17085 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17086 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rcx)
17087 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17088 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rcx)
17089 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17090 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rcx)
17091 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17092 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%rcx)
17093 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 448(%r8)
17094 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 256(%r8)
17095 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 320(%r8)
17096 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17097 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%r8)
17098 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17099 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%r8)
17100 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17101 ; AVX512DQ-NEXT: vmovaps %zmm8, (%r8)
17102 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17103 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%r8)
17104 ; AVX512DQ-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
17105 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%r8)
17106 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 448(%r9)
17107 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 256(%r9)
17108 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 320(%r9)
17109 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 128(%r9)
17110 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 192(%r9)
17111 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%r9)
17112 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%r9)
17113 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 384(%r9)
17114 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
17115 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 384(%rax)
17116 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 448(%rax)
17117 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 256(%rax)
17118 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 320(%rax)
17119 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 128(%rax)
17120 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 192(%rax)
17121 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, (%rax)
17122 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 64(%rax)
17123 ; AVX512DQ-NEXT: addq $7240, %rsp # imm = 0x1C48
17124 ; AVX512DQ-NEXT: vzeroupper
17125 ; AVX512DQ-NEXT: retq
17127 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf64:
17128 ; AVX512DQ-FCP: # %bb.0:
17129 ; AVX512DQ-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
17130 ; AVX512DQ-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
17131 ; AVX512DQ-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
17132 ; AVX512DQ-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
17133 ; AVX512DQ-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
17134 ; AVX512DQ-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
17135 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
17136 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
17137 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
17138 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
17139 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
17140 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
17141 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
17142 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
17143 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17144 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
17145 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
17146 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17147 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
17148 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
17149 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17150 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
17151 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
17152 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17153 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
17154 ; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
17155 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
17156 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
17157 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17158 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
17159 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
17160 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17161 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
17162 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
17163 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17164 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
17165 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
17166 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17167 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
17168 ; AVX512DQ-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17169 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
17170 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
17171 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17172 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
17173 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
17174 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17175 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
17176 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
17177 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17178 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
17179 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
17180 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17181 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
17182 ; AVX512DQ-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17183 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
17184 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
17185 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17186 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
17187 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
17188 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17189 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
17190 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
17191 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17192 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
17193 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
17194 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17195 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
17196 ; AVX512DQ-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
17197 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
17198 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
17199 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
17200 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17201 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
17202 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17203 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
17204 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17205 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
17206 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
17207 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17208 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
17209 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17210 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
17211 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17212 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17213 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
17214 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17215 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
17216 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
17217 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17218 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
17219 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17220 ; AVX512DQ-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
17221 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17222 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
17223 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17224 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17225 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
17226 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17227 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17228 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
17229 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17230 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17231 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
17232 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17233 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17234 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17235 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17236 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
17237 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17238 ; AVX512DQ-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
17239 ; AVX512DQ-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
17240 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
17241 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
17242 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17243 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
17244 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17245 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17246 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17247 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
17248 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17249 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17250 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
17251 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17252 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17253 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
17254 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17255 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
17256 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17257 ; AVX512DQ-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
17258 ; AVX512DQ-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
17259 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17260 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
17261 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17262 ; AVX512DQ-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
17263 ; AVX512DQ-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
17264 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
17265 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17266 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
17267 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
17268 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17269 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
17270 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17271 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17272 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
17273 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17274 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
17275 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17276 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17277 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
17278 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17279 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
17280 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17281 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17282 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
17283 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17284 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
17285 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17286 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
17287 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17288 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
17289 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17290 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
17291 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
17292 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
17293 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17294 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
17295 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17296 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
17297 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17298 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
17299 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17300 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
17301 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
17302 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
17303 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17304 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
17305 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
17306 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
17307 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17308 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
17309 ; AVX512DQ-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17310 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17311 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
17312 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17313 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
17314 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17315 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17316 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17317 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
17318 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
17319 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17320 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17321 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17322 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17323 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17324 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17325 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17326 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17327 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17328 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17329 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17330 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17331 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17332 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17333 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17334 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17335 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17336 ; AVX512DQ-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
17337 ; AVX512DQ-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
17338 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17339 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17340 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17341 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17342 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17343 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17344 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17345 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17346 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17347 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17348 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17349 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17350 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17351 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17352 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17353 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17354 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17355 ; AVX512DQ-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
17356 ; AVX512DQ-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
17357 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17358 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17359 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17360 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17361 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17362 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17363 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17364 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17365 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17366 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17367 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17368 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17369 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17370 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17371 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17372 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17373 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17374 ; AVX512DQ-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
17375 ; AVX512DQ-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
17376 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17377 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17378 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17379 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17380 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17381 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17382 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17383 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17384 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17385 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17386 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17387 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17388 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17389 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17390 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17391 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17392 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17393 ; AVX512DQ-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
17394 ; AVX512DQ-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
17395 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17396 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
17397 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17398 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17399 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
17400 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17401 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
17402 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
17403 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17404 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
17405 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
17406 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17407 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17408 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
17409 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17410 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
17411 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
17412 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
17413 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
17414 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
17415 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17416 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
17417 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
17418 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
17419 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
17420 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17421 ; AVX512DQ-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
17422 ; AVX512DQ-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
17423 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
17424 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
17425 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17426 ; AVX512DQ-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
17427 ; AVX512DQ-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
17428 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
17429 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
17430 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17431 ; AVX512DQ-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
17432 ; AVX512DQ-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
17433 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
17434 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
17435 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17436 ; AVX512DQ-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
17437 ; AVX512DQ-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
17438 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
17439 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
17440 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17441 ; AVX512DQ-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
17442 ; AVX512DQ-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
17443 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
17444 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
17445 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17446 ; AVX512DQ-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
17447 ; AVX512DQ-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
17448 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
17449 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
17450 ; AVX512DQ-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
17451 ; AVX512DQ-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
17452 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
17453 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
17454 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17455 ; AVX512DQ-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
17456 ; AVX512DQ-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
17457 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
17458 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
17459 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17460 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17461 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
17462 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17463 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
17464 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17465 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
17466 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17467 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
17468 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
17469 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
17470 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
17471 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17472 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17473 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
17474 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
17475 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
17476 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
17477 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
17478 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17479 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
17480 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
17481 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17482 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
17483 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
17484 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17485 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
17486 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
17487 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17488 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
17489 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
17490 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17491 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
17492 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
17493 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17494 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
17495 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
17496 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17497 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
17498 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
17499 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17500 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
17501 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
17502 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
17503 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
17504 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17505 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
17506 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17507 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
17508 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
17509 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17510 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
17511 ; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
17512 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
17513 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
17514 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17515 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
17516 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
17517 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
17518 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
17519 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
17520 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17521 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
17522 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
17523 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17524 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
17525 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
17526 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17527 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
17528 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
17529 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
17530 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
17531 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
17532 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17533 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
17534 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
17535 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17536 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
17537 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
17538 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
17539 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
17540 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
17541 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17542 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
17543 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
17544 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17545 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
17546 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
17547 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
17548 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
17549 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
17550 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
17551 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
17552 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17553 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
17554 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
17555 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
17556 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
17557 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
17558 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
17559 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
17560 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17561 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
17562 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
17563 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
17564 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
17565 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
17566 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
17567 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
17568 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
17569 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
17570 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
17571 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
17572 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
17573 ; AVX512DQ-FCP-NEXT: movb $56, %al
17574 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
17575 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17576 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17577 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17578 ; AVX512DQ-FCP-NEXT: movb $-64, %al
17579 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
17580 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17581 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17582 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17583 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17584 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17585 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17586 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17587 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17588 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17589 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17590 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17591 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17592 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17593 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17594 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17595 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17596 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17597 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17598 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17599 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17600 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17601 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17602 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17603 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17604 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17605 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17606 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17607 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17608 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17609 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17610 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17611 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17612 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17613 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17614 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
17615 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17616 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
17617 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17618 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
17619 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17620 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
17621 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17622 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17623 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17624 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17625 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17626 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17627 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17628 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17629 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17630 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17631 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17632 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17633 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17634 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17635 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17636 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17637 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17638 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17639 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17640 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17641 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17642 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17643 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17644 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17645 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17646 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17647 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17648 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17649 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17650 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17651 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17652 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17653 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17654 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17655 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17656 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17657 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17658 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17659 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17660 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17661 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17662 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17663 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17664 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
17665 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17666 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
17667 ; AVX512DQ-FCP-NEXT: movb $24, %al
17668 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
17669 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17670 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17671 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17672 ; AVX512DQ-FCP-NEXT: movb $-32, %al
17673 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
17674 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
17675 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17676 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17677 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17678 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17679 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17680 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17681 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17682 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17683 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17684 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17685 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17686 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17687 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17688 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17689 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17690 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17691 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
17692 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17693 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17694 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17695 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17696 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
17697 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17698 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17699 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17700 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17701 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
17702 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17703 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17704 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17705 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17706 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
17707 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17708 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17709 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
17710 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
17711 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17712 ; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
17713 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17714 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
17715 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
17716 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17717 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17718 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17719 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17720 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17721 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17722 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17723 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17724 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17725 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17726 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17727 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17728 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17729 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17730 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17731 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17732 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17733 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17734 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17735 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17736 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17737 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17738 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17739 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17740 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17741 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
17742 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
17743 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
17744 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17745 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
17746 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
17747 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
17748 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17749 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
17750 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
17751 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17752 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
17753 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
17754 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17755 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
17756 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17757 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
17758 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17759 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
17760 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17761 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
17762 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17763 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
17764 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17765 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
17766 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17767 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
17768 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17769 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
17770 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17771 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
17772 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17773 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
17774 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17775 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
17776 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17777 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
17778 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17779 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
17780 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
17781 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17782 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
17783 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
17784 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17785 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
17786 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
17787 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17788 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
17789 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
17790 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17791 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
17792 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
17793 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
17794 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
17795 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
17796 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
17797 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
17798 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
17799 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
17800 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
17801 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
17802 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
17803 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
17804 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
17805 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
17806 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
17807 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17808 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
17809 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17810 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
17811 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17812 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
17813 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17814 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
17815 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17816 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
17817 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17818 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rsi)
17819 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
17820 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17821 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
17822 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17823 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
17824 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17825 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
17826 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17827 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
17828 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17829 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rdx)
17830 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17831 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
17832 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17833 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
17834 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
17835 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17836 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
17837 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17838 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
17839 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17840 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
17841 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17842 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
17843 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17844 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rcx)
17845 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17846 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
17847 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17848 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
17849 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
17850 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
17851 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
17852 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17853 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%r8)
17854 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17855 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%r8)
17856 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17857 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%r8)
17858 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17859 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%r8)
17860 ; AVX512DQ-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
17861 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%r8)
17862 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
17863 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
17864 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
17865 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
17866 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
17867 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
17868 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
17869 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
17870 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
17871 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
17872 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
17873 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
17874 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
17875 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
17876 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
17877 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
17878 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
17879 ; AVX512DQ-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
17880 ; AVX512DQ-FCP-NEXT: vzeroupper
17881 ; AVX512DQ-FCP-NEXT: retq
17883 ; AVX512BW-LABEL: load_i64_stride6_vf64:
17884 ; AVX512BW: # %bb.0:
17885 ; AVX512BW-NEXT: subq $7240, %rsp # imm = 0x1C48
17886 ; AVX512BW-NEXT: vmovdqa64 2048(%rdi), %zmm3
17887 ; AVX512BW-NEXT: vmovdqa64 1280(%rdi), %zmm4
17888 ; AVX512BW-NEXT: vmovdqa64 1344(%rdi), %zmm0
17889 ; AVX512BW-NEXT: vmovdqa64 896(%rdi), %zmm5
17890 ; AVX512BW-NEXT: vmovdqa64 960(%rdi), %zmm26
17891 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm2
17892 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm1
17893 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm6
17894 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm29
17895 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
17896 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
17897 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm8
17898 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
17899 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17900 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm8
17901 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
17902 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17903 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm8
17904 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
17905 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17906 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
17907 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
17908 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17909 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
17910 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
17911 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9
17912 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
17913 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17914 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm9
17915 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
17916 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17917 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9
17918 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
17919 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17920 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm9
17921 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
17922 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17923 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
17924 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17925 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm10
17926 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
17927 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17928 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm10
17929 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
17930 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17931 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm10
17932 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
17933 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17934 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm10
17935 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
17936 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17937 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
17938 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17939 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm11
17940 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
17941 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17942 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
17943 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
17944 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17945 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm11
17946 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
17947 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17948 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm11
17949 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
17950 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17951 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
17952 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
17953 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm13
17954 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm12
17955 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
17956 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17957 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
17958 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17959 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
17960 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17961 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm1
17962 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
17963 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17964 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
17965 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17966 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
17967 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17968 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17969 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
17970 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17971 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
17972 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
17973 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17974 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
17975 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17976 ; AVX512BW-NEXT: vmovdqa64 2112(%rdi), %zmm0
17977 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
17978 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
17979 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17980 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
17981 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
17982 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17983 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
17984 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
17985 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17986 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
17987 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
17988 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17989 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
17990 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17991 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17992 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
17993 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17994 ; AVX512BW-NEXT: vmovdqa64 1664(%rdi), %zmm1
17995 ; AVX512BW-NEXT: vmovdqa64 1728(%rdi), %zmm0
17996 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3
17997 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
17998 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17999 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3
18000 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18001 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18002 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18003 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
18004 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18005 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18006 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
18007 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18008 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18009 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
18010 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18011 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
18012 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18013 ; AVX512BW-NEXT: vmovdqa64 2432(%rdi), %zmm3
18014 ; AVX512BW-NEXT: vmovdqa64 2496(%rdi), %zmm0
18015 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
18016 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18017 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18018 ; AVX512BW-NEXT: vmovdqa64 2816(%rdi), %zmm4
18019 ; AVX512BW-NEXT: vmovdqa64 2880(%rdi), %zmm1
18020 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
18021 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18022 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5
18023 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
18024 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18025 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
18026 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18027 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18028 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
18029 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18030 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
18031 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18032 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18033 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
18034 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18035 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
18036 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18037 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18038 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
18039 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18040 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
18041 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18042 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
18043 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18044 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18045 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18046 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm1
18047 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm2
18048 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
18049 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18050 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
18051 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18052 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
18053 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18054 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
18055 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18056 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
18057 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
18058 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
18059 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18060 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
18061 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
18062 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
18063 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18064 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
18065 ; AVX512BW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18066 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18067 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
18068 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18069 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
18070 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18071 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18072 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18073 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
18074 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
18075 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18076 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18077 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18078 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18079 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18080 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18081 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18082 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18083 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18084 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18085 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18086 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18087 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18088 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18089 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18090 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18091 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18092 ; AVX512BW-NEXT: vmovdqa64 1216(%rdi), %zmm1
18093 ; AVX512BW-NEXT: vmovdqa64 1152(%rdi), %zmm2
18094 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18095 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18096 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18097 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18098 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18099 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18100 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18101 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18102 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18103 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18104 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18105 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18106 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18107 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18108 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18109 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18110 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18111 ; AVX512BW-NEXT: vmovdqa64 832(%rdi), %zmm1
18112 ; AVX512BW-NEXT: vmovdqa64 768(%rdi), %zmm2
18113 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18114 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18115 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18116 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18117 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18118 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18119 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18120 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18121 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18122 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18123 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18124 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18125 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18126 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18127 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18128 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18129 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18130 ; AVX512BW-NEXT: vmovdqa64 1984(%rdi), %zmm1
18131 ; AVX512BW-NEXT: vmovdqa64 1920(%rdi), %zmm2
18132 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18133 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18134 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18135 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18136 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18137 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18138 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18139 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18140 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18141 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18142 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18143 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18144 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18145 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18146 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18147 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18148 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18149 ; AVX512BW-NEXT: vmovdqa64 1600(%rdi), %zmm1
18150 ; AVX512BW-NEXT: vmovdqa64 1536(%rdi), %zmm30
18151 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18152 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
18153 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18154 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18155 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
18156 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18157 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
18158 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
18159 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18160 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
18161 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
18162 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18163 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18164 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
18165 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18166 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
18167 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm26
18168 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm18
18169 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm1
18170 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
18171 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18172 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm21
18173 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm13
18174 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
18175 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
18176 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18177 ; AVX512BW-NEXT: vmovdqa64 1088(%rdi), %zmm24
18178 ; AVX512BW-NEXT: vmovdqa64 1024(%rdi), %zmm19
18179 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm1
18180 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
18181 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18182 ; AVX512BW-NEXT: vmovdqa64 1472(%rdi), %zmm28
18183 ; AVX512BW-NEXT: vmovdqa64 1408(%rdi), %zmm16
18184 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm1
18185 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
18186 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18187 ; AVX512BW-NEXT: vmovdqa64 1856(%rdi), %zmm25
18188 ; AVX512BW-NEXT: vmovdqa64 1792(%rdi), %zmm7
18189 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1
18190 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
18191 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18192 ; AVX512BW-NEXT: vmovdqa64 2240(%rdi), %zmm23
18193 ; AVX512BW-NEXT: vmovdqa64 2176(%rdi), %zmm17
18194 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm1
18195 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
18196 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18197 ; AVX512BW-NEXT: vmovdqa64 2624(%rdi), %zmm20
18198 ; AVX512BW-NEXT: vmovdqa64 2560(%rdi), %zmm5
18199 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
18200 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
18201 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18202 ; AVX512BW-NEXT: vmovdqa64 2368(%rdi), %zmm1
18203 ; AVX512BW-NEXT: vmovdqa64 2304(%rdi), %zmm22
18204 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm29
18205 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
18206 ; AVX512BW-NEXT: vmovdqa64 3008(%rdi), %zmm14
18207 ; AVX512BW-NEXT: vmovdqa64 2944(%rdi), %zmm31
18208 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm2
18209 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
18210 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18211 ; AVX512BW-NEXT: vmovdqa64 2752(%rdi), %zmm2
18212 ; AVX512BW-NEXT: vmovdqa64 2688(%rdi), %zmm12
18213 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
18214 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm3
18215 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18216 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18217 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
18218 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18219 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
18220 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18221 ; AVX512BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
18222 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18223 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
18224 ; AVX512BW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
18225 ; AVX512BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
18226 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm3
18227 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18228 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18229 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
18230 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
18231 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
18232 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
18233 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
18234 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18235 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
18236 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
18237 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18238 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm1
18239 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
18240 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18241 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm1
18242 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
18243 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18244 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm1
18245 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
18246 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18247 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm1
18248 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
18249 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18250 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1
18251 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
18252 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18253 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm1
18254 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
18255 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18256 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
18257 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
18258 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm0
18259 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
18260 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18261 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
18262 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18263 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm0
18264 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
18265 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18266 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
18267 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
18268 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm0
18269 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
18270 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18271 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
18272 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
18273 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
18274 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm6
18275 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
18276 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18277 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm6
18278 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
18279 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18280 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm6
18281 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
18282 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18283 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
18284 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm27
18285 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
18286 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm6
18287 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
18288 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18289 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm6
18290 ; AVX512BW-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
18291 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18292 ; AVX512BW-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
18293 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm18
18294 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
18295 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm6
18296 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
18297 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18298 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm6
18299 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
18300 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18301 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
18302 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm15
18303 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
18304 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm19
18305 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
18306 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm6
18307 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
18308 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18309 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
18310 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13
18311 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
18312 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm17
18313 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
18314 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm6
18315 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
18316 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18317 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
18318 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6
18319 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
18320 ; AVX512BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
18321 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm7
18322 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
18323 ; AVX512BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
18324 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm16
18325 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
18326 ; AVX512BW-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
18327 ; AVX512BW-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
18328 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
18329 ; AVX512BW-NEXT: movb $56, %al
18330 ; AVX512BW-NEXT: kmovd %eax, %k1
18331 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18332 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18333 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18334 ; AVX512BW-NEXT: movb $-64, %al
18335 ; AVX512BW-NEXT: kmovd %eax, %k2
18336 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18337 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18338 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18339 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18340 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18341 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18342 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18343 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18344 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18345 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18346 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18347 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18348 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18349 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18350 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18351 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18352 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18353 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18354 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18355 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18356 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18357 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18358 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18359 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18360 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18361 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18362 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18363 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18364 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18365 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18366 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18367 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18368 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18369 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18370 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
18371 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18372 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
18373 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18374 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
18375 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18376 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
18377 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18378 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18379 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18380 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18381 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18382 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18383 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18384 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18385 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18386 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18387 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18388 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18389 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18390 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18391 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18392 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18393 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18394 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18395 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18396 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18397 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18398 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18399 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18400 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18401 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18402 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18403 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18404 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18405 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18406 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18407 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18408 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18409 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18410 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18411 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18412 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18413 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18414 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18415 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18416 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18417 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18418 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18419 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18420 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
18421 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18422 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
18423 ; AVX512BW-NEXT: movb $24, %al
18424 ; AVX512BW-NEXT: kmovd %eax, %k2
18425 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18426 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18427 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18428 ; AVX512BW-NEXT: movb $-32, %al
18429 ; AVX512BW-NEXT: kmovd %eax, %k1
18430 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
18431 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18432 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18433 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18434 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18435 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18436 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18437 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18438 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18439 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18440 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18441 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18442 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18443 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18444 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18445 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18446 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18447 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
18448 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18449 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18450 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18451 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18452 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
18453 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18454 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18455 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18456 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18457 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
18458 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18459 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18460 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18461 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18462 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
18463 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18464 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18465 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
18466 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
18467 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18468 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
18469 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18470 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
18471 ; AVX512BW-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
18472 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18473 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18474 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18475 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18476 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18477 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18478 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18479 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18480 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18481 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18482 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18483 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18484 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18485 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18486 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18487 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18488 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18489 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18490 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18491 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18492 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18493 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18494 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18495 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18496 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18497 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
18498 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
18499 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
18500 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18501 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
18502 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
18503 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
18504 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18505 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
18506 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
18507 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18508 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
18509 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
18510 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18511 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
18512 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18513 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
18514 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18515 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
18516 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18517 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
18518 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18519 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
18520 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18521 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
18522 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18523 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
18524 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18525 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
18526 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18527 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
18528 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18529 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
18530 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18531 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
18532 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18533 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
18534 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18535 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
18536 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
18537 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18538 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
18539 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
18540 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18541 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
18542 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
18543 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
18544 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
18545 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
18546 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
18547 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
18548 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
18549 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
18550 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
18551 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
18552 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
18553 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
18554 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
18555 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
18556 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
18557 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
18558 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18559 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
18560 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
18561 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 448(%rsi)
18562 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 384(%rsi)
18563 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18564 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rsi)
18565 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18566 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rsi)
18567 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18568 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rsi)
18569 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18570 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rsi)
18571 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18572 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rsi)
18573 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18574 ; AVX512BW-NEXT: vmovaps %zmm8, (%rsi)
18575 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 448(%rdx)
18576 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18577 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rdx)
18578 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18579 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rdx)
18580 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18581 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rdx)
18582 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18583 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rdx)
18584 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18585 ; AVX512BW-NEXT: vmovaps %zmm8, (%rdx)
18586 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18587 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rdx)
18588 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18589 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%rdx)
18590 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 448(%rcx)
18591 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18592 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rcx)
18593 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18594 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rcx)
18595 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18596 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rcx)
18597 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18598 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rcx)
18599 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18600 ; AVX512BW-NEXT: vmovaps %zmm8, (%rcx)
18601 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18602 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rcx)
18603 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18604 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%rcx)
18605 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 448(%r8)
18606 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 256(%r8)
18607 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 320(%r8)
18608 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18609 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%r8)
18610 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18611 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%r8)
18612 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18613 ; AVX512BW-NEXT: vmovaps %zmm8, (%r8)
18614 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18615 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%r8)
18616 ; AVX512BW-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
18617 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%r8)
18618 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 448(%r9)
18619 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 256(%r9)
18620 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 320(%r9)
18621 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%r9)
18622 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 192(%r9)
18623 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r9)
18624 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
18625 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 384(%r9)
18626 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
18627 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 384(%rax)
18628 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 448(%rax)
18629 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 256(%rax)
18630 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 320(%rax)
18631 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 128(%rax)
18632 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 192(%rax)
18633 ; AVX512BW-NEXT: vmovdqa64 %zmm3, (%rax)
18634 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 64(%rax)
18635 ; AVX512BW-NEXT: addq $7240, %rsp # imm = 0x1C48
18636 ; AVX512BW-NEXT: vzeroupper
18637 ; AVX512BW-NEXT: retq
18639 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf64:
18640 ; AVX512BW-FCP: # %bb.0:
18641 ; AVX512BW-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
18642 ; AVX512BW-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
18643 ; AVX512BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
18644 ; AVX512BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
18645 ; AVX512BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
18646 ; AVX512BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
18647 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
18648 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
18649 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
18650 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
18651 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
18652 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
18653 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
18654 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
18655 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18656 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
18657 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
18658 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18659 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
18660 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
18661 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18662 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
18663 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
18664 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18665 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
18666 ; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
18667 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
18668 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
18669 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18670 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
18671 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
18672 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18673 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
18674 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
18675 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18676 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
18677 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
18678 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18679 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
18680 ; AVX512BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18681 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
18682 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
18683 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18684 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
18685 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
18686 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18687 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
18688 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
18689 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18690 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
18691 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
18692 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18693 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
18694 ; AVX512BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18695 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
18696 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
18697 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18698 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
18699 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
18700 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18701 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
18702 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
18703 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18704 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
18705 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
18706 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18707 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
18708 ; AVX512BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
18709 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
18710 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
18711 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
18712 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18713 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
18714 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18715 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
18716 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18717 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
18718 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
18719 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18720 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
18721 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18722 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
18723 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18724 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18725 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
18726 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18727 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
18728 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
18729 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18730 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
18731 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18732 ; AVX512BW-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
18733 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18734 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18735 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18736 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18737 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
18738 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18739 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18740 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
18741 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18742 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18743 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
18744 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18745 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18746 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18747 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18748 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18749 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18750 ; AVX512BW-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
18751 ; AVX512BW-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
18752 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
18753 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
18754 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18755 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
18756 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18757 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18758 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18759 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
18760 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18761 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18762 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
18763 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18764 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18765 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
18766 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18767 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
18768 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18769 ; AVX512BW-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
18770 ; AVX512BW-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
18771 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18772 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18773 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18774 ; AVX512BW-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
18775 ; AVX512BW-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
18776 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
18777 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18778 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
18779 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
18780 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18781 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
18782 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18783 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18784 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
18785 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18786 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
18787 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18788 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18789 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
18790 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18791 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
18792 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18793 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18794 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
18795 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18796 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
18797 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18798 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
18799 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18800 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18801 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18802 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
18803 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
18804 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
18805 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18806 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
18807 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18808 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
18809 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18810 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
18811 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18812 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
18813 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
18814 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
18815 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18816 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
18817 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
18818 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
18819 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18820 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
18821 ; AVX512BW-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18822 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18823 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
18824 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18825 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
18826 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18827 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18828 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18829 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
18830 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
18831 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18832 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18833 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18834 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18835 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18836 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18837 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18838 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18839 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18840 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18841 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18842 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18843 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18844 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18845 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18846 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18847 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18848 ; AVX512BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
18849 ; AVX512BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
18850 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18851 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18852 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18853 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18854 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18855 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18856 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18857 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18858 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18859 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18860 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18861 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18862 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18863 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18864 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18865 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18866 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18867 ; AVX512BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
18868 ; AVX512BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
18869 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18870 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18871 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18872 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18873 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18874 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18875 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18876 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18877 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18878 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18879 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18880 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18881 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18882 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18883 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18884 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18885 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18886 ; AVX512BW-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
18887 ; AVX512BW-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
18888 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18889 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18890 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18891 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18892 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18893 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18894 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18895 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18896 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18897 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18898 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18899 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18900 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18901 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18902 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18903 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18904 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18905 ; AVX512BW-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
18906 ; AVX512BW-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
18907 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18908 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
18909 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18910 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18911 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
18912 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18913 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
18914 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
18915 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18916 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
18917 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
18918 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18919 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18920 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
18921 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18922 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
18923 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
18924 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
18925 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
18926 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
18927 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18928 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
18929 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
18930 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
18931 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
18932 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18933 ; AVX512BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
18934 ; AVX512BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
18935 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
18936 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
18937 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18938 ; AVX512BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
18939 ; AVX512BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
18940 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
18941 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
18942 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18943 ; AVX512BW-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
18944 ; AVX512BW-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
18945 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
18946 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
18947 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18948 ; AVX512BW-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
18949 ; AVX512BW-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
18950 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
18951 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
18952 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18953 ; AVX512BW-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
18954 ; AVX512BW-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
18955 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
18956 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
18957 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18958 ; AVX512BW-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
18959 ; AVX512BW-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
18960 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
18961 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
18962 ; AVX512BW-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
18963 ; AVX512BW-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
18964 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
18965 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
18966 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18967 ; AVX512BW-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
18968 ; AVX512BW-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
18969 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
18970 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
18971 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18972 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18973 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
18974 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18975 ; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
18976 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18977 ; AVX512BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
18978 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18979 ; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
18980 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
18981 ; AVX512BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
18982 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
18983 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18984 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18985 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
18986 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
18987 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
18988 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
18989 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
18990 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18991 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
18992 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
18993 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18994 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
18995 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
18996 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18997 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
18998 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
18999 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19000 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
19001 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
19002 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19003 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
19004 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
19005 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19006 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
19007 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
19008 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19009 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
19010 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
19011 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19012 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
19013 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
19014 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
19015 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
19016 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19017 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
19018 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19019 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
19020 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
19021 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19022 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
19023 ; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
19024 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
19025 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
19026 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19027 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
19028 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19029 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
19030 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
19031 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
19032 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19033 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
19034 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
19035 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19036 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
19037 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
19038 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19039 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
19040 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
19041 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
19042 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
19043 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
19044 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19045 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
19046 ; AVX512BW-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
19047 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19048 ; AVX512BW-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
19049 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
19050 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
19051 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
19052 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
19053 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19054 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
19055 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
19056 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19057 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
19058 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
19059 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
19060 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
19061 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
19062 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
19063 ; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
19064 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19065 ; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
19066 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
19067 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
19068 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
19069 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
19070 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
19071 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
19072 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19073 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
19074 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
19075 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
19076 ; AVX512BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
19077 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
19078 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
19079 ; AVX512BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
19080 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
19081 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
19082 ; AVX512BW-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
19083 ; AVX512BW-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
19084 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
19085 ; AVX512BW-FCP-NEXT: movb $56, %al
19086 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
19087 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19088 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19089 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19090 ; AVX512BW-FCP-NEXT: movb $-64, %al
19091 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
19092 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19093 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19094 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19095 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19096 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19097 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19098 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19099 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19100 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19101 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19102 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19103 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19104 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19105 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19106 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19107 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19108 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19109 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19110 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19111 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19112 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19113 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19114 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19115 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19116 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19117 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19118 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19119 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19120 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19121 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19122 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19123 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19124 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19125 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19126 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
19127 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19128 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
19129 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19130 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
19131 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19132 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
19133 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19134 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19135 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19136 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19137 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19138 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19139 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19140 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19141 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19142 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19143 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19144 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19145 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19146 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19147 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19148 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19149 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19150 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19151 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19152 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19153 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19154 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19155 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19156 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19157 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19158 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19159 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19160 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19161 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19162 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19163 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19164 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19165 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19166 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19167 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19168 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19169 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19170 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19171 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19172 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19173 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19174 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19175 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19176 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
19177 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19178 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
19179 ; AVX512BW-FCP-NEXT: movb $24, %al
19180 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
19181 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19182 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19183 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19184 ; AVX512BW-FCP-NEXT: movb $-32, %al
19185 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
19186 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
19187 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19188 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19189 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19190 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19191 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19192 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19193 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19194 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19195 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19196 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19197 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19198 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19199 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19200 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19201 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19202 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19203 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
19204 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19205 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19206 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19207 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19208 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
19209 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19210 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19211 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19212 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19213 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
19214 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19215 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19216 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19217 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19218 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
19219 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19220 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19221 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
19222 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
19223 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19224 ; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
19225 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19226 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
19227 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
19228 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19229 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19230 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19231 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19232 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19233 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19234 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19235 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19236 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19237 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19238 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19239 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19240 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19241 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19242 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19243 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19244 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19245 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19246 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19247 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19248 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19249 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19250 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19251 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19252 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19253 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
19254 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
19255 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
19256 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19257 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
19258 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
19259 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
19260 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19261 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
19262 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
19263 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19264 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
19265 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
19266 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19267 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
19268 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19269 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
19270 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19271 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
19272 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19273 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
19274 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19275 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
19276 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19277 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
19278 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19279 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
19280 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19281 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
19282 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19283 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
19284 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19285 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
19286 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19287 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
19288 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19289 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
19290 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19291 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
19292 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
19293 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19294 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
19295 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
19296 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19297 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
19298 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
19299 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19300 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
19301 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
19302 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
19303 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
19304 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
19305 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
19306 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
19307 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
19308 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
19309 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
19310 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
19311 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
19312 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
19313 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
19314 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
19315 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
19316 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
19317 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
19318 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
19319 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19320 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
19321 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19322 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
19323 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19324 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
19325 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19326 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
19327 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19328 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
19329 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19330 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rsi)
19331 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
19332 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19333 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
19334 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19335 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
19336 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19337 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
19338 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19339 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
19340 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19341 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rdx)
19342 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19343 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
19344 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19345 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
19346 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
19347 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19348 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
19349 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19350 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
19351 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19352 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
19353 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19354 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
19355 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19356 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rcx)
19357 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19358 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
19359 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19360 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
19361 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
19362 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
19363 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
19364 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19365 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%r8)
19366 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19367 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%r8)
19368 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19369 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%r8)
19370 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19371 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%r8)
19372 ; AVX512BW-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
19373 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%r8)
19374 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
19375 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
19376 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
19377 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
19378 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
19379 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
19380 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
19381 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
19382 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
19383 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
19384 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
19385 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
19386 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
19387 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
19388 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
19389 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
19390 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
19391 ; AVX512BW-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
19392 ; AVX512BW-FCP-NEXT: vzeroupper
19393 ; AVX512BW-FCP-NEXT: retq
19395 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf64:
19396 ; AVX512DQ-BW: # %bb.0:
19397 ; AVX512DQ-BW-NEXT: subq $7240, %rsp # imm = 0x1C48
19398 ; AVX512DQ-BW-NEXT: vmovdqa64 2048(%rdi), %zmm3
19399 ; AVX512DQ-BW-NEXT: vmovdqa64 1280(%rdi), %zmm4
19400 ; AVX512DQ-BW-NEXT: vmovdqa64 1344(%rdi), %zmm0
19401 ; AVX512DQ-BW-NEXT: vmovdqa64 896(%rdi), %zmm5
19402 ; AVX512DQ-BW-NEXT: vmovdqa64 960(%rdi), %zmm26
19403 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm2
19404 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm1
19405 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm6
19406 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm29
19407 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
19408 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
19409 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, %zmm8
19410 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
19411 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19412 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm8
19413 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
19414 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19415 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm8
19416 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
19417 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19418 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
19419 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
19420 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19421 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
19422 ; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
19423 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm9
19424 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
19425 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19426 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, %zmm9
19427 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
19428 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19429 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9
19430 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
19431 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19432 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm9
19433 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
19434 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19435 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
19436 ; AVX512DQ-BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19437 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm10
19438 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
19439 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19440 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm10
19441 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
19442 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19443 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm10
19444 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
19445 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19446 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm10
19447 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
19448 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19449 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
19450 ; AVX512DQ-BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19451 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm11
19452 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
19453 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19454 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm11
19455 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
19456 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19457 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm11
19458 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
19459 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19460 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm11
19461 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
19462 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19463 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
19464 ; AVX512DQ-BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
19465 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm13
19466 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm12
19467 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
19468 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19469 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
19470 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19471 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
19472 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19473 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm1
19474 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
19475 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19476 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
19477 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19478 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm1
19479 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
19480 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19481 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
19482 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19483 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
19484 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
19485 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19486 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
19487 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19488 ; AVX512DQ-BW-NEXT: vmovdqa64 2112(%rdi), %zmm0
19489 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19490 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
19491 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19492 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19493 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
19494 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19495 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19496 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
19497 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19498 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19499 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
19500 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19501 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19502 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
19503 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19504 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
19505 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19506 ; AVX512DQ-BW-NEXT: vmovdqa64 1664(%rdi), %zmm1
19507 ; AVX512DQ-BW-NEXT: vmovdqa64 1728(%rdi), %zmm0
19508 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm3
19509 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
19510 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19511 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm3
19512 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19513 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19514 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19515 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
19516 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19517 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19518 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
19519 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19520 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19521 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
19522 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19523 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
19524 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19525 ; AVX512DQ-BW-NEXT: vmovdqa64 2432(%rdi), %zmm3
19526 ; AVX512DQ-BW-NEXT: vmovdqa64 2496(%rdi), %zmm0
19527 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19528 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
19529 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19530 ; AVX512DQ-BW-NEXT: vmovdqa64 2816(%rdi), %zmm4
19531 ; AVX512DQ-BW-NEXT: vmovdqa64 2880(%rdi), %zmm1
19532 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
19533 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19534 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5
19535 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
19536 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19537 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
19538 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19539 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19540 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
19541 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19542 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
19543 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19544 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19545 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
19546 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19547 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
19548 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19549 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19550 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
19551 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19552 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
19553 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19554 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
19555 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19556 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
19557 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19558 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm1
19559 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm2
19560 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
19561 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19562 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
19563 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19564 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
19565 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19566 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
19567 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19568 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
19569 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
19570 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
19571 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19572 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
19573 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
19574 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
19575 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19576 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
19577 ; AVX512DQ-BW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19578 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19579 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
19580 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19581 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
19582 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19583 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19584 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19585 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm2
19586 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
19587 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19588 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19589 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19590 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19591 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19592 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19593 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19594 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19595 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19596 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19597 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19598 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19599 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19600 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19601 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19602 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19603 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19604 ; AVX512DQ-BW-NEXT: vmovdqa64 1216(%rdi), %zmm1
19605 ; AVX512DQ-BW-NEXT: vmovdqa64 1152(%rdi), %zmm2
19606 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19607 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19608 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19609 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19610 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19611 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19612 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19613 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19614 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19615 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19616 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19617 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19618 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19619 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19620 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19621 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19622 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19623 ; AVX512DQ-BW-NEXT: vmovdqa64 832(%rdi), %zmm1
19624 ; AVX512DQ-BW-NEXT: vmovdqa64 768(%rdi), %zmm2
19625 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19626 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19627 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19628 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19629 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19630 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19631 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19632 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19633 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19634 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19635 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19636 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19637 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19638 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19639 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19640 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19641 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19642 ; AVX512DQ-BW-NEXT: vmovdqa64 1984(%rdi), %zmm1
19643 ; AVX512DQ-BW-NEXT: vmovdqa64 1920(%rdi), %zmm2
19644 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19645 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19646 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19647 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19648 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19649 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19650 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19651 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19652 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19653 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19654 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19655 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19656 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19657 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19658 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19659 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19660 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19661 ; AVX512DQ-BW-NEXT: vmovdqa64 1600(%rdi), %zmm1
19662 ; AVX512DQ-BW-NEXT: vmovdqa64 1536(%rdi), %zmm30
19663 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19664 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
19665 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19666 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19667 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
19668 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19669 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm2
19670 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
19671 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19672 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm2
19673 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
19674 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19675 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19676 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
19677 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19678 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
19679 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm26
19680 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm18
19681 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm1
19682 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
19683 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19684 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm21
19685 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm13
19686 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm1
19687 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
19688 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19689 ; AVX512DQ-BW-NEXT: vmovdqa64 1088(%rdi), %zmm24
19690 ; AVX512DQ-BW-NEXT: vmovdqa64 1024(%rdi), %zmm19
19691 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm1
19692 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
19693 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19694 ; AVX512DQ-BW-NEXT: vmovdqa64 1472(%rdi), %zmm28
19695 ; AVX512DQ-BW-NEXT: vmovdqa64 1408(%rdi), %zmm16
19696 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm1
19697 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
19698 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19699 ; AVX512DQ-BW-NEXT: vmovdqa64 1856(%rdi), %zmm25
19700 ; AVX512DQ-BW-NEXT: vmovdqa64 1792(%rdi), %zmm7
19701 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1
19702 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
19703 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19704 ; AVX512DQ-BW-NEXT: vmovdqa64 2240(%rdi), %zmm23
19705 ; AVX512DQ-BW-NEXT: vmovdqa64 2176(%rdi), %zmm17
19706 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm1
19707 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
19708 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19709 ; AVX512DQ-BW-NEXT: vmovdqa64 2624(%rdi), %zmm20
19710 ; AVX512DQ-BW-NEXT: vmovdqa64 2560(%rdi), %zmm5
19711 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
19712 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
19713 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19714 ; AVX512DQ-BW-NEXT: vmovdqa64 2368(%rdi), %zmm1
19715 ; AVX512DQ-BW-NEXT: vmovdqa64 2304(%rdi), %zmm22
19716 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm29
19717 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
19718 ; AVX512DQ-BW-NEXT: vmovdqa64 3008(%rdi), %zmm14
19719 ; AVX512DQ-BW-NEXT: vmovdqa64 2944(%rdi), %zmm31
19720 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, %zmm2
19721 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
19722 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19723 ; AVX512DQ-BW-NEXT: vmovdqa64 2752(%rdi), %zmm2
19724 ; AVX512DQ-BW-NEXT: vmovdqa64 2688(%rdi), %zmm12
19725 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
19726 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm3
19727 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19728 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19729 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
19730 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19731 ; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
19732 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19733 ; AVX512DQ-BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
19734 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19735 ; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
19736 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
19737 ; AVX512DQ-BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
19738 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm3
19739 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19740 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19741 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
19742 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
19743 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
19744 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
19745 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
19746 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19747 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm1
19748 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
19749 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19750 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm1
19751 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
19752 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19753 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm1
19754 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
19755 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19756 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm1
19757 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
19758 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19759 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm1
19760 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
19761 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19762 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1
19763 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
19764 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19765 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, %zmm1
19766 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
19767 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19768 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
19769 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
19770 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm0
19771 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
19772 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19773 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
19774 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19775 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm0
19776 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
19777 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19778 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
19779 ; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
19780 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm0
19781 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
19782 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19783 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
19784 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19785 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
19786 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm6
19787 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
19788 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19789 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm6
19790 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
19791 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19792 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm6
19793 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
19794 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19795 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
19796 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm27
19797 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
19798 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm6
19799 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
19800 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19801 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm6
19802 ; AVX512DQ-BW-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
19803 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19804 ; AVX512DQ-BW-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
19805 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm18
19806 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
19807 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm6
19808 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
19809 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19810 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm6
19811 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
19812 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19813 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
19814 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm15
19815 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
19816 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm19
19817 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
19818 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm6
19819 ; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
19820 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19821 ; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
19822 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm13
19823 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
19824 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm17
19825 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
19826 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm6
19827 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
19828 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19829 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
19830 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm6
19831 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
19832 ; AVX512DQ-BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
19833 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm7
19834 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
19835 ; AVX512DQ-BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
19836 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm16
19837 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
19838 ; AVX512DQ-BW-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
19839 ; AVX512DQ-BW-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
19840 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
19841 ; AVX512DQ-BW-NEXT: movb $56, %al
19842 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
19843 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19844 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19845 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19846 ; AVX512DQ-BW-NEXT: movb $-64, %al
19847 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
19848 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19849 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19850 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19851 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19852 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19853 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19854 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19855 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19856 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19857 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19858 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19859 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19860 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19861 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19862 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19863 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19864 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19865 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19866 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19867 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19868 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19869 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19870 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19871 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19872 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19873 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19874 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19875 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19876 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19877 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19878 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19879 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19880 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19881 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19882 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
19883 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19884 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
19885 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19886 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
19887 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19888 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
19889 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19890 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19891 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19892 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19893 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19894 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19895 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19896 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19897 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19898 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19899 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19900 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19901 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19902 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19903 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19904 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19905 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19906 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19907 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19908 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19909 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19910 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19911 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19912 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19913 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19914 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19915 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19916 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19917 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19918 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19919 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19920 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19921 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19922 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19923 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19924 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19925 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19926 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19928 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19929 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19930 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19931 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19932 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
19933 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19934 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
19935 ; AVX512DQ-BW-NEXT: movb $24, %al
19936 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
19937 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19938 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19939 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19940 ; AVX512DQ-BW-NEXT: movb $-32, %al
19941 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
19942 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
19943 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19944 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19945 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19946 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19947 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19948 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19949 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19950 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19951 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19952 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19953 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19954 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19955 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19956 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19957 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19958 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19959 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
19960 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19961 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19962 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19963 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19964 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
19965 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19966 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19967 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19968 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19969 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
19970 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19971 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19972 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19973 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19974 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
19975 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19976 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19977 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
19978 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
19979 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19980 ; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
19981 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19982 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
19983 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
19984 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19985 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19986 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19987 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19988 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19989 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19990 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19991 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19992 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19993 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19994 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19995 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19996 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19997 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19998 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19999 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20000 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20001 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20002 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20003 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20004 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20005 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20006 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20007 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20008 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20009 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
20010 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
20011 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
20012 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20013 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
20014 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
20015 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
20016 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20017 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
20018 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
20019 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20020 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
20021 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
20022 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20023 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
20024 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20025 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
20026 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20027 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
20028 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20029 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
20030 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20031 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
20032 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20033 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
20034 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20035 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
20036 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20037 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
20038 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20039 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
20040 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20041 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
20042 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20043 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
20044 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20045 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
20046 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20047 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
20048 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
20049 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20050 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
20051 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
20052 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20053 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
20054 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
20055 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
20056 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
20057 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
20058 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
20059 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
20060 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
20061 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
20062 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
20063 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
20064 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
20065 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
20066 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
20067 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
20068 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
20069 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
20070 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
20071 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
20072 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
20073 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, 448(%rsi)
20074 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, 384(%rsi)
20075 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20076 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rsi)
20077 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20078 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rsi)
20079 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20080 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rsi)
20081 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20082 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rsi)
20083 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20084 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rsi)
20085 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20086 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rsi)
20087 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 448(%rdx)
20088 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20089 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rdx)
20090 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20091 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rdx)
20092 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20093 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rdx)
20094 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20095 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rdx)
20096 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20097 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rdx)
20098 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20099 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rdx)
20100 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20101 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%rdx)
20102 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 448(%rcx)
20103 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20104 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rcx)
20105 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20106 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rcx)
20107 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20108 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rcx)
20109 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20110 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rcx)
20111 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20112 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rcx)
20113 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20114 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rcx)
20115 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20116 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%rcx)
20117 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 448(%r8)
20118 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, 256(%r8)
20119 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 320(%r8)
20120 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20121 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%r8)
20122 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20123 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%r8)
20124 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20125 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%r8)
20126 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20127 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%r8)
20128 ; AVX512DQ-BW-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
20129 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%r8)
20130 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 448(%r9)
20131 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 256(%r9)
20132 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 320(%r9)
20133 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 128(%r9)
20134 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 192(%r9)
20135 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%r9)
20136 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
20137 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 384(%r9)
20138 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
20139 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 384(%rax)
20140 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 448(%rax)
20141 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, 256(%rax)
20142 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 320(%rax)
20143 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 128(%rax)
20144 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 192(%rax)
20145 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, (%rax)
20146 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 64(%rax)
20147 ; AVX512DQ-BW-NEXT: addq $7240, %rsp # imm = 0x1C48
20148 ; AVX512DQ-BW-NEXT: vzeroupper
20149 ; AVX512DQ-BW-NEXT: retq
20151 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf64:
20152 ; AVX512DQ-BW-FCP: # %bb.0:
20153 ; AVX512DQ-BW-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
20154 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
20155 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
20156 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
20157 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
20158 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
20159 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
20160 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
20161 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
20162 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
20163 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
20164 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
20165 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
20166 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
20167 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20168 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
20169 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
20170 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20171 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
20172 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
20173 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20174 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
20175 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
20176 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20177 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
20178 ; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
20179 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
20180 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
20181 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20182 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
20183 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
20184 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20185 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
20186 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
20187 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20188 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
20189 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
20190 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20191 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
20192 ; AVX512DQ-BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20193 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
20194 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
20195 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20196 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
20197 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
20198 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20199 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
20200 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
20201 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20202 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
20203 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
20204 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20205 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
20206 ; AVX512DQ-BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20207 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
20208 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
20209 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20210 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
20211 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
20212 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20213 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
20214 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
20215 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20216 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
20217 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
20218 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20219 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
20220 ; AVX512DQ-BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
20221 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
20222 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
20223 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
20224 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20225 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
20226 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
20227 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
20228 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20229 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
20230 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
20231 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20232 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
20233 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20234 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
20235 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
20236 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20237 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
20238 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20239 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
20240 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
20241 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20242 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
20243 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20244 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
20245 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20246 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
20247 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20248 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20249 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
20250 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20251 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20252 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
20253 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20254 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20255 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
20256 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20257 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20258 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
20259 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20260 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
20261 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20262 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
20263 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
20264 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
20265 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
20266 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20267 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
20268 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20269 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20270 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20271 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
20272 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20273 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20274 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
20275 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20276 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20277 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
20278 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20279 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
20280 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20281 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
20282 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
20283 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20284 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
20285 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20286 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
20287 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
20288 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
20289 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20290 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
20291 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
20292 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20293 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
20294 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20295 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20296 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
20297 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20298 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
20299 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20300 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20301 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
20302 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20303 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
20304 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20305 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20306 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
20307 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20308 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
20309 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20310 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
20311 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20312 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
20313 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20314 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
20315 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
20316 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
20317 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20318 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
20319 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20320 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
20321 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20322 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
20323 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20324 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
20325 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
20326 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
20327 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20328 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
20329 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
20330 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
20331 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20332 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
20333 ; AVX512DQ-BW-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20334 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20335 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
20336 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20337 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
20338 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20339 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20340 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20341 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
20342 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
20343 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20344 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20345 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20346 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20347 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20348 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20349 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20350 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20351 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20352 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20353 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20354 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20355 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20356 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20357 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20358 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20359 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20360 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
20361 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
20362 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20363 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20364 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20365 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20366 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20367 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20368 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20369 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20370 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20371 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20372 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20373 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20374 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20375 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20376 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20377 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20378 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20379 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
20380 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
20381 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20382 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20383 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20384 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20385 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20386 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20387 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20388 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20389 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20390 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20391 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20392 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20393 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20394 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20395 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20396 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20397 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20398 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
20399 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
20400 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20401 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20402 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20403 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20404 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20405 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20406 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20407 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20408 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20409 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20410 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20411 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20412 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20413 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20414 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20415 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20416 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20417 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
20418 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
20419 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20420 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
20421 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20422 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20423 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
20424 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20425 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
20426 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
20427 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20428 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
20429 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
20430 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20431 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20432 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
20433 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20434 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
20435 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
20436 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
20437 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
20438 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
20439 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20440 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
20441 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
20442 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
20443 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
20444 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20445 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
20446 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
20447 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
20448 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
20449 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20450 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
20451 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
20452 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
20453 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
20454 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20455 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
20456 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
20457 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
20458 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
20459 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20460 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
20461 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
20462 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
20463 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
20464 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20465 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
20466 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
20467 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
20468 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
20469 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20470 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
20471 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
20472 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
20473 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
20474 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
20475 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
20476 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
20477 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
20478 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20479 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
20480 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
20481 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
20482 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
20483 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20484 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20485 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
20486 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20487 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
20488 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20489 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
20490 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20491 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
20492 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
20493 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
20494 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
20495 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20496 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20497 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
20498 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
20499 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
20500 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
20501 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
20502 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20503 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
20504 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
20505 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20506 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
20507 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
20508 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20509 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
20510 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
20511 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20512 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
20513 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
20514 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20515 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
20516 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
20517 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20518 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
20519 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
20520 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20521 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
20522 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
20523 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20524 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
20525 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
20526 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
20527 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
20528 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20529 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
20530 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
20531 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
20532 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
20533 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20534 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
20535 ; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
20536 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
20537 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
20538 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20539 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
20540 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
20541 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
20542 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
20543 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
20544 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20545 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
20546 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
20547 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20548 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
20549 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
20550 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20551 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
20552 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
20553 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
20554 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
20555 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
20556 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20557 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
20558 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
20559 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20560 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
20561 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
20562 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
20563 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
20564 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
20565 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20566 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
20567 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
20568 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20569 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
20570 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
20571 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
20572 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
20573 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
20574 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
20575 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
20576 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20577 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
20578 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
20579 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
20580 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
20581 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
20582 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
20583 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
20584 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20585 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
20586 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
20587 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
20588 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
20589 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
20590 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
20591 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
20592 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
20593 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
20594 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
20595 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
20596 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
20597 ; AVX512DQ-BW-FCP-NEXT: movb $56, %al
20598 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
20599 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20600 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20601 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20602 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %al
20603 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
20604 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20605 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20606 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20607 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20608 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20609 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20610 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20611 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20612 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20613 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20614 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20615 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20616 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20617 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20618 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20619 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20620 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20621 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20622 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20623 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20624 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20625 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20626 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20627 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20628 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20629 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20630 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20631 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20632 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20633 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20634 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20635 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20636 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20637 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20638 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
20639 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20640 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
20641 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20642 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
20643 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20644 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
20645 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20646 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20647 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20648 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20649 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20650 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20651 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20652 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20653 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20654 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20655 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20656 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20657 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20658 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20659 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20660 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20661 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20662 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20663 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20664 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20665 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20666 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20667 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20668 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20669 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20670 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20671 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20672 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20673 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20674 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20675 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20676 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20677 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20678 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20679 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20680 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20681 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20682 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20683 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20684 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20685 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20686 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20687 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20688 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
20689 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20690 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
20691 ; AVX512DQ-BW-FCP-NEXT: movb $24, %al
20692 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
20693 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20694 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20695 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20696 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %al
20697 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
20698 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
20699 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20700 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20701 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20702 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20703 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20704 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20705 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20706 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20707 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20708 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20709 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20710 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20711 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20712 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20713 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20714 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20715 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
20716 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20717 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20718 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20719 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20720 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
20721 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20722 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20723 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20724 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20725 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
20726 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20727 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20728 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20729 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20730 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
20731 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20732 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20733 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
20734 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
20735 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20736 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
20737 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20738 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
20739 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
20740 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20741 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20742 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20743 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20744 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20745 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20746 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20747 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20748 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20749 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20750 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20751 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20752 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20753 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20754 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20755 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20756 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20757 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20758 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20759 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20760 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20761 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20762 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20763 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20764 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20765 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
20766 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
20767 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
20768 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20769 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
20770 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
20771 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
20772 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20773 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
20774 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
20775 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20776 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
20777 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
20778 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20779 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
20780 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20781 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
20782 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20783 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
20784 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20785 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
20786 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20787 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
20788 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20789 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
20790 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20791 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
20792 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20793 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
20794 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20795 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
20796 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20797 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
20798 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20799 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
20800 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20801 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
20802 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20803 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
20804 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
20805 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20806 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
20807 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
20808 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20809 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
20810 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
20811 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
20812 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
20813 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
20814 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
20815 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
20816 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
20817 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
20818 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
20819 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
20820 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
20821 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
20822 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
20823 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
20824 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
20825 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
20826 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
20827 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
20828 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
20829 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
20830 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
20831 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20832 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
20833 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20834 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
20835 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20836 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
20837 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20838 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
20839 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20840 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
20841 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20842 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rsi)
20843 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
20844 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20845 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
20846 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20847 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
20848 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20849 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
20850 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20851 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
20852 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20853 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rdx)
20854 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20855 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
20856 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20857 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
20858 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
20859 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20860 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
20861 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20862 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
20863 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20864 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
20865 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20866 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
20867 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20868 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rcx)
20869 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20870 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
20871 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20872 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
20873 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
20874 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
20875 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
20876 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20877 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%r8)
20878 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20879 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%r8)
20880 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20881 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%r8)
20882 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20883 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%r8)
20884 ; AVX512DQ-BW-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
20885 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%r8)
20886 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
20887 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
20888 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
20889 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
20890 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
20891 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
20892 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
20893 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
20894 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
20895 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
20896 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
20897 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
20898 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
20899 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
20900 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
20901 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
20902 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
20903 ; AVX512DQ-BW-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
20904 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
20905 ; AVX512DQ-BW-FCP-NEXT: retq
20906 %wide.vec = load <384 x i64>, ptr %in.vec, align 64
20907 %strided.vec0 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90, i32 96, i32 102, i32 108, i32 114, i32 120, i32 126, i32 132, i32 138, i32 144, i32 150, i32 156, i32 162, i32 168, i32 174, i32 180, i32 186, i32 192, i32 198, i32 204, i32 210, i32 216, i32 222, i32 228, i32 234, i32 240, i32 246, i32 252, i32 258, i32 264, i32 270, i32 276, i32 282, i32 288, i32 294, i32 300, i32 306, i32 312, i32 318, i32 324, i32 330, i32 336, i32 342, i32 348, i32 354, i32 360, i32 366, i32 372, i32 378>
20908 %strided.vec1 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91, i32 97, i32 103, i32 109, i32 115, i32 121, i32 127, i32 133, i32 139, i32 145, i32 151, i32 157, i32 163, i32 169, i32 175, i32 181, i32 187, i32 193, i32 199, i32 205, i32 211, i32 217, i32 223, i32 229, i32 235, i32 241, i32 247, i32 253, i32 259, i32 265, i32 271, i32 277, i32 283, i32 289, i32 295, i32 301, i32 307, i32 313, i32 319, i32 325, i32 331, i32 337, i32 343, i32 349, i32 355, i32 361, i32 367, i32 373, i32 379>
20909 %strided.vec2 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92, i32 98, i32 104, i32 110, i32 116, i32 122, i32 128, i32 134, i32 140, i32 146, i32 152, i32 158, i32 164, i32 170, i32 176, i32 182, i32 188, i32 194, i32 200, i32 206, i32 212, i32 218, i32 224, i32 230, i32 236, i32 242, i32 248, i32 254, i32 260, i32 266, i32 272, i32 278, i32 284, i32 290, i32 296, i32 302, i32 308, i32 314, i32 320, i32 326, i32 332, i32 338, i32 344, i32 350, i32 356, i32 362, i32 368, i32 374, i32 380>
20910 %strided.vec3 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93, i32 99, i32 105, i32 111, i32 117, i32 123, i32 129, i32 135, i32 141, i32 147, i32 153, i32 159, i32 165, i32 171, i32 177, i32 183, i32 189, i32 195, i32 201, i32 207, i32 213, i32 219, i32 225, i32 231, i32 237, i32 243, i32 249, i32 255, i32 261, i32 267, i32 273, i32 279, i32 285, i32 291, i32 297, i32 303, i32 309, i32 315, i32 321, i32 327, i32 333, i32 339, i32 345, i32 351, i32 357, i32 363, i32 369, i32 375, i32 381>
20911 %strided.vec4 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94, i32 100, i32 106, i32 112, i32 118, i32 124, i32 130, i32 136, i32 142, i32 148, i32 154, i32 160, i32 166, i32 172, i32 178, i32 184, i32 190, i32 196, i32 202, i32 208, i32 214, i32 220, i32 226, i32 232, i32 238, i32 244, i32 250, i32 256, i32 262, i32 268, i32 274, i32 280, i32 286, i32 292, i32 298, i32 304, i32 310, i32 316, i32 322, i32 328, i32 334, i32 340, i32 346, i32 352, i32 358, i32 364, i32 370, i32 376, i32 382>
20912 %strided.vec5 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95, i32 101, i32 107, i32 113, i32 119, i32 125, i32 131, i32 137, i32 143, i32 149, i32 155, i32 161, i32 167, i32 173, i32 179, i32 185, i32 191, i32 197, i32 203, i32 209, i32 215, i32 221, i32 227, i32 233, i32 239, i32 245, i32 251, i32 257, i32 263, i32 269, i32 275, i32 281, i32 287, i32 293, i32 299, i32 305, i32 311, i32 317, i32 323, i32 329, i32 335, i32 341, i32 347, i32 353, i32 359, i32 365, i32 371, i32 377, i32 383>
20913 store <64 x i64> %strided.vec0, ptr %out.vec0, align 64
20914 store <64 x i64> %strided.vec1, ptr %out.vec1, align 64
20915 store <64 x i64> %strided.vec2, ptr %out.vec2, align 64
20916 store <64 x i64> %strided.vec3, ptr %out.vec3, align 64
20917 store <64 x i64> %strided.vec4, ptr %out.vec4, align 64
20918 store <64 x i64> %strided.vec5, ptr %out.vec5, align 64