1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP
16 ; These patterns are produced by the LoopVectorizer for interleaved loads.
18 define void @load_i64_stride6_vf2(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
19 ; SSE-LABEL: load_i64_stride6_vf2:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movaps 80(%rdi), %xmm0
23 ; SSE-NEXT: movaps 64(%rdi), %xmm1
24 ; SSE-NEXT: movaps (%rdi), %xmm2
25 ; SSE-NEXT: movaps 16(%rdi), %xmm3
26 ; SSE-NEXT: movaps 32(%rdi), %xmm4
27 ; SSE-NEXT: movaps 48(%rdi), %xmm5
28 ; SSE-NEXT: movaps %xmm2, %xmm6
29 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
30 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
31 ; SSE-NEXT: movaps %xmm3, %xmm5
32 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm1[0]
33 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
34 ; SSE-NEXT: movaps %xmm4, %xmm1
35 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
36 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
37 ; SSE-NEXT: movaps %xmm6, (%rsi)
38 ; SSE-NEXT: movaps %xmm2, (%rdx)
39 ; SSE-NEXT: movaps %xmm5, (%rcx)
40 ; SSE-NEXT: movaps %xmm3, (%r8)
41 ; SSE-NEXT: movaps %xmm1, (%r9)
42 ; SSE-NEXT: movaps %xmm4, (%rax)
45 ; AVX-LABEL: load_i64_stride6_vf2:
47 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
48 ; AVX-NEXT: vmovaps (%rdi), %xmm0
49 ; AVX-NEXT: vmovaps 16(%rdi), %xmm1
50 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
51 ; AVX-NEXT: vmovaps 48(%rdi), %xmm3
52 ; AVX-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
53 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
54 ; AVX-NEXT: vmovaps 64(%rdi), %xmm3
55 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
56 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
57 ; AVX-NEXT: vmovaps 80(%rdi), %xmm3
58 ; AVX-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
59 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
60 ; AVX-NEXT: vmovaps %xmm4, (%rsi)
61 ; AVX-NEXT: vmovaps %xmm0, (%rdx)
62 ; AVX-NEXT: vmovaps %xmm5, (%rcx)
63 ; AVX-NEXT: vmovaps %xmm1, (%r8)
64 ; AVX-NEXT: vmovaps %xmm6, (%r9)
65 ; AVX-NEXT: vmovaps %xmm2, (%rax)
68 ; AVX2-LABEL: load_i64_stride6_vf2:
70 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
71 ; AVX2-NEXT: vmovaps (%rdi), %xmm0
72 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm1
73 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm2
74 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm3
75 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
76 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
77 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm3
78 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
79 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
80 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm3
81 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
82 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
83 ; AVX2-NEXT: vmovaps %xmm4, (%rsi)
84 ; AVX2-NEXT: vmovaps %xmm0, (%rdx)
85 ; AVX2-NEXT: vmovaps %xmm5, (%rcx)
86 ; AVX2-NEXT: vmovaps %xmm1, (%r8)
87 ; AVX2-NEXT: vmovaps %xmm6, (%r9)
88 ; AVX2-NEXT: vmovaps %xmm2, (%rax)
91 ; AVX2-FP-LABEL: load_i64_stride6_vf2:
93 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
94 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm0
95 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm1
96 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm2
97 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm3
98 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
99 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
100 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm3
101 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
102 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
103 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm3
104 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
105 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
106 ; AVX2-FP-NEXT: vmovaps %xmm4, (%rsi)
107 ; AVX2-FP-NEXT: vmovaps %xmm0, (%rdx)
108 ; AVX2-FP-NEXT: vmovaps %xmm5, (%rcx)
109 ; AVX2-FP-NEXT: vmovaps %xmm1, (%r8)
110 ; AVX2-FP-NEXT: vmovaps %xmm6, (%r9)
111 ; AVX2-FP-NEXT: vmovaps %xmm2, (%rax)
114 ; AVX2-FCP-LABEL: load_i64_stride6_vf2:
116 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
117 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm0
118 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm1
119 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm2
120 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm3
121 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
122 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
123 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm3
124 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
125 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
126 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm3
127 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
128 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
129 ; AVX2-FCP-NEXT: vmovaps %xmm4, (%rsi)
130 ; AVX2-FCP-NEXT: vmovaps %xmm0, (%rdx)
131 ; AVX2-FCP-NEXT: vmovaps %xmm5, (%rcx)
132 ; AVX2-FCP-NEXT: vmovaps %xmm1, (%r8)
133 ; AVX2-FCP-NEXT: vmovaps %xmm6, (%r9)
134 ; AVX2-FCP-NEXT: vmovaps %xmm2, (%rax)
135 ; AVX2-FCP-NEXT: retq
137 ; AVX512-LABEL: load_i64_stride6_vf2:
139 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
140 ; AVX512-NEXT: vmovaps (%rdi), %xmm0
141 ; AVX512-NEXT: vmovaps 16(%rdi), %xmm1
142 ; AVX512-NEXT: vmovaps 32(%rdi), %xmm2
143 ; AVX512-NEXT: vmovaps 48(%rdi), %xmm3
144 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
145 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
146 ; AVX512-NEXT: vmovaps 64(%rdi), %xmm3
147 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
148 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
149 ; AVX512-NEXT: vmovaps 80(%rdi), %xmm3
150 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
151 ; AVX512-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
152 ; AVX512-NEXT: vmovaps %xmm4, (%rsi)
153 ; AVX512-NEXT: vmovaps %xmm0, (%rdx)
154 ; AVX512-NEXT: vmovaps %xmm5, (%rcx)
155 ; AVX512-NEXT: vmovaps %xmm1, (%r8)
156 ; AVX512-NEXT: vmovaps %xmm6, (%r9)
157 ; AVX512-NEXT: vmovaps %xmm2, (%rax)
160 ; AVX512-FCP-LABEL: load_i64_stride6_vf2:
161 ; AVX512-FCP: # %bb.0:
162 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
163 ; AVX512-FCP-NEXT: vmovaps (%rdi), %xmm0
164 ; AVX512-FCP-NEXT: vmovaps 16(%rdi), %xmm1
165 ; AVX512-FCP-NEXT: vmovaps 32(%rdi), %xmm2
166 ; AVX512-FCP-NEXT: vmovaps 48(%rdi), %xmm3
167 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
168 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
169 ; AVX512-FCP-NEXT: vmovaps 64(%rdi), %xmm3
170 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
171 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
172 ; AVX512-FCP-NEXT: vmovaps 80(%rdi), %xmm3
173 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
174 ; AVX512-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
175 ; AVX512-FCP-NEXT: vmovaps %xmm4, (%rsi)
176 ; AVX512-FCP-NEXT: vmovaps %xmm0, (%rdx)
177 ; AVX512-FCP-NEXT: vmovaps %xmm5, (%rcx)
178 ; AVX512-FCP-NEXT: vmovaps %xmm1, (%r8)
179 ; AVX512-FCP-NEXT: vmovaps %xmm6, (%r9)
180 ; AVX512-FCP-NEXT: vmovaps %xmm2, (%rax)
181 ; AVX512-FCP-NEXT: retq
183 ; AVX512DQ-LABEL: load_i64_stride6_vf2:
185 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
186 ; AVX512DQ-NEXT: vmovaps (%rdi), %xmm0
187 ; AVX512DQ-NEXT: vmovaps 16(%rdi), %xmm1
188 ; AVX512DQ-NEXT: vmovaps 32(%rdi), %xmm2
189 ; AVX512DQ-NEXT: vmovaps 48(%rdi), %xmm3
190 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
191 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
192 ; AVX512DQ-NEXT: vmovaps 64(%rdi), %xmm3
193 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
194 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
195 ; AVX512DQ-NEXT: vmovaps 80(%rdi), %xmm3
196 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
197 ; AVX512DQ-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
198 ; AVX512DQ-NEXT: vmovaps %xmm4, (%rsi)
199 ; AVX512DQ-NEXT: vmovaps %xmm0, (%rdx)
200 ; AVX512DQ-NEXT: vmovaps %xmm5, (%rcx)
201 ; AVX512DQ-NEXT: vmovaps %xmm1, (%r8)
202 ; AVX512DQ-NEXT: vmovaps %xmm6, (%r9)
203 ; AVX512DQ-NEXT: vmovaps %xmm2, (%rax)
204 ; AVX512DQ-NEXT: retq
206 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf2:
207 ; AVX512DQ-FCP: # %bb.0:
208 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
209 ; AVX512DQ-FCP-NEXT: vmovaps (%rdi), %xmm0
210 ; AVX512DQ-FCP-NEXT: vmovaps 16(%rdi), %xmm1
211 ; AVX512DQ-FCP-NEXT: vmovaps 32(%rdi), %xmm2
212 ; AVX512DQ-FCP-NEXT: vmovaps 48(%rdi), %xmm3
213 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
214 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
215 ; AVX512DQ-FCP-NEXT: vmovaps 64(%rdi), %xmm3
216 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
217 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
218 ; AVX512DQ-FCP-NEXT: vmovaps 80(%rdi), %xmm3
219 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
220 ; AVX512DQ-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
221 ; AVX512DQ-FCP-NEXT: vmovaps %xmm4, (%rsi)
222 ; AVX512DQ-FCP-NEXT: vmovaps %xmm0, (%rdx)
223 ; AVX512DQ-FCP-NEXT: vmovaps %xmm5, (%rcx)
224 ; AVX512DQ-FCP-NEXT: vmovaps %xmm1, (%r8)
225 ; AVX512DQ-FCP-NEXT: vmovaps %xmm6, (%r9)
226 ; AVX512DQ-FCP-NEXT: vmovaps %xmm2, (%rax)
227 ; AVX512DQ-FCP-NEXT: retq
229 ; AVX512BW-LABEL: load_i64_stride6_vf2:
231 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
232 ; AVX512BW-NEXT: vmovaps (%rdi), %xmm0
233 ; AVX512BW-NEXT: vmovaps 16(%rdi), %xmm1
234 ; AVX512BW-NEXT: vmovaps 32(%rdi), %xmm2
235 ; AVX512BW-NEXT: vmovaps 48(%rdi), %xmm3
236 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
237 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
238 ; AVX512BW-NEXT: vmovaps 64(%rdi), %xmm3
239 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
240 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
241 ; AVX512BW-NEXT: vmovaps 80(%rdi), %xmm3
242 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
243 ; AVX512BW-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
244 ; AVX512BW-NEXT: vmovaps %xmm4, (%rsi)
245 ; AVX512BW-NEXT: vmovaps %xmm0, (%rdx)
246 ; AVX512BW-NEXT: vmovaps %xmm5, (%rcx)
247 ; AVX512BW-NEXT: vmovaps %xmm1, (%r8)
248 ; AVX512BW-NEXT: vmovaps %xmm6, (%r9)
249 ; AVX512BW-NEXT: vmovaps %xmm2, (%rax)
250 ; AVX512BW-NEXT: retq
252 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf2:
253 ; AVX512BW-FCP: # %bb.0:
254 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
255 ; AVX512BW-FCP-NEXT: vmovaps (%rdi), %xmm0
256 ; AVX512BW-FCP-NEXT: vmovaps 16(%rdi), %xmm1
257 ; AVX512BW-FCP-NEXT: vmovaps 32(%rdi), %xmm2
258 ; AVX512BW-FCP-NEXT: vmovaps 48(%rdi), %xmm3
259 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
260 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
261 ; AVX512BW-FCP-NEXT: vmovaps 64(%rdi), %xmm3
262 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
263 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
264 ; AVX512BW-FCP-NEXT: vmovaps 80(%rdi), %xmm3
265 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
266 ; AVX512BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
267 ; AVX512BW-FCP-NEXT: vmovaps %xmm4, (%rsi)
268 ; AVX512BW-FCP-NEXT: vmovaps %xmm0, (%rdx)
269 ; AVX512BW-FCP-NEXT: vmovaps %xmm5, (%rcx)
270 ; AVX512BW-FCP-NEXT: vmovaps %xmm1, (%r8)
271 ; AVX512BW-FCP-NEXT: vmovaps %xmm6, (%r9)
272 ; AVX512BW-FCP-NEXT: vmovaps %xmm2, (%rax)
273 ; AVX512BW-FCP-NEXT: retq
275 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf2:
276 ; AVX512DQ-BW: # %bb.0:
277 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
278 ; AVX512DQ-BW-NEXT: vmovaps (%rdi), %xmm0
279 ; AVX512DQ-BW-NEXT: vmovaps 16(%rdi), %xmm1
280 ; AVX512DQ-BW-NEXT: vmovaps 32(%rdi), %xmm2
281 ; AVX512DQ-BW-NEXT: vmovaps 48(%rdi), %xmm3
282 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
283 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
284 ; AVX512DQ-BW-NEXT: vmovaps 64(%rdi), %xmm3
285 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
286 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
287 ; AVX512DQ-BW-NEXT: vmovaps 80(%rdi), %xmm3
288 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
289 ; AVX512DQ-BW-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
290 ; AVX512DQ-BW-NEXT: vmovaps %xmm4, (%rsi)
291 ; AVX512DQ-BW-NEXT: vmovaps %xmm0, (%rdx)
292 ; AVX512DQ-BW-NEXT: vmovaps %xmm5, (%rcx)
293 ; AVX512DQ-BW-NEXT: vmovaps %xmm1, (%r8)
294 ; AVX512DQ-BW-NEXT: vmovaps %xmm6, (%r9)
295 ; AVX512DQ-BW-NEXT: vmovaps %xmm2, (%rax)
296 ; AVX512DQ-BW-NEXT: retq
298 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf2:
299 ; AVX512DQ-BW-FCP: # %bb.0:
300 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
301 ; AVX512DQ-BW-FCP-NEXT: vmovaps (%rdi), %xmm0
302 ; AVX512DQ-BW-FCP-NEXT: vmovaps 16(%rdi), %xmm1
303 ; AVX512DQ-BW-FCP-NEXT: vmovaps 32(%rdi), %xmm2
304 ; AVX512DQ-BW-FCP-NEXT: vmovaps 48(%rdi), %xmm3
305 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm3[0]
306 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
307 ; AVX512DQ-BW-FCP-NEXT: vmovaps 64(%rdi), %xmm3
308 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm5 = xmm1[0],xmm3[0]
309 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
310 ; AVX512DQ-BW-FCP-NEXT: vmovaps 80(%rdi), %xmm3
311 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm3[0]
312 ; AVX512DQ-BW-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
313 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm4, (%rsi)
314 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm0, (%rdx)
315 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm5, (%rcx)
316 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm1, (%r8)
317 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm6, (%r9)
318 ; AVX512DQ-BW-FCP-NEXT: vmovaps %xmm2, (%rax)
319 ; AVX512DQ-BW-FCP-NEXT: retq
320 %wide.vec = load <12 x i64>, ptr %in.vec, align 64
321 %strided.vec0 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 0, i32 6>
322 %strided.vec1 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 1, i32 7>
323 %strided.vec2 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 2, i32 8>
324 %strided.vec3 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 3, i32 9>
325 %strided.vec4 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 4, i32 10>
326 %strided.vec5 = shufflevector <12 x i64> %wide.vec, <12 x i64> poison, <2 x i32> <i32 5, i32 11>
327 store <2 x i64> %strided.vec0, ptr %out.vec0, align 64
328 store <2 x i64> %strided.vec1, ptr %out.vec1, align 64
329 store <2 x i64> %strided.vec2, ptr %out.vec2, align 64
330 store <2 x i64> %strided.vec3, ptr %out.vec3, align 64
331 store <2 x i64> %strided.vec4, ptr %out.vec4, align 64
332 store <2 x i64> %strided.vec5, ptr %out.vec5, align 64
336 define void @load_i64_stride6_vf4(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
337 ; SSE-LABEL: load_i64_stride6_vf4:
339 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
340 ; SSE-NEXT: movaps 80(%rdi), %xmm3
341 ; SSE-NEXT: movaps 176(%rdi), %xmm5
342 ; SSE-NEXT: movaps 128(%rdi), %xmm0
343 ; SSE-NEXT: movaps 64(%rdi), %xmm6
344 ; SSE-NEXT: movaps 160(%rdi), %xmm7
345 ; SSE-NEXT: movaps 112(%rdi), %xmm1
346 ; SSE-NEXT: movaps (%rdi), %xmm8
347 ; SSE-NEXT: movaps 16(%rdi), %xmm4
348 ; SSE-NEXT: movaps 32(%rdi), %xmm2
349 ; SSE-NEXT: movaps 48(%rdi), %xmm9
350 ; SSE-NEXT: movaps 144(%rdi), %xmm10
351 ; SSE-NEXT: movaps 96(%rdi), %xmm11
352 ; SSE-NEXT: movaps %xmm11, %xmm12
353 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm10[0]
354 ; SSE-NEXT: movaps %xmm8, %xmm13
355 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm9[0]
356 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm10[1]
357 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
358 ; SSE-NEXT: movaps %xmm1, %xmm9
359 ; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm7[0]
360 ; SSE-NEXT: movaps %xmm4, %xmm10
361 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm6[0]
362 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm7[1]
363 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
364 ; SSE-NEXT: movaps %xmm0, %xmm6
365 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
366 ; SSE-NEXT: movaps %xmm2, %xmm7
367 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm3[0]
368 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm5[1]
369 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
370 ; SSE-NEXT: movaps %xmm13, (%rsi)
371 ; SSE-NEXT: movaps %xmm12, 16(%rsi)
372 ; SSE-NEXT: movaps %xmm8, (%rdx)
373 ; SSE-NEXT: movaps %xmm11, 16(%rdx)
374 ; SSE-NEXT: movaps %xmm10, (%rcx)
375 ; SSE-NEXT: movaps %xmm9, 16(%rcx)
376 ; SSE-NEXT: movaps %xmm4, (%r8)
377 ; SSE-NEXT: movaps %xmm1, 16(%r8)
378 ; SSE-NEXT: movaps %xmm7, (%r9)
379 ; SSE-NEXT: movaps %xmm6, 16(%r9)
380 ; SSE-NEXT: movaps %xmm2, (%rax)
381 ; SSE-NEXT: movaps %xmm0, 16(%rax)
384 ; AVX-LABEL: load_i64_stride6_vf4:
386 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
387 ; AVX-NEXT: vmovaps 160(%rdi), %ymm0
388 ; AVX-NEXT: vmovaps 96(%rdi), %ymm1
389 ; AVX-NEXT: vmovaps 128(%rdi), %ymm2
390 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm3
391 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
392 ; AVX-NEXT: vmovaps (%rdi), %xmm5
393 ; AVX-NEXT: vmovaps 16(%rdi), %xmm6
394 ; AVX-NEXT: vmovaps 32(%rdi), %xmm7
395 ; AVX-NEXT: vmovaps 48(%rdi), %xmm8
396 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm5[0],xmm8[0]
397 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm9[0,1,2,3],ymm4[4,5,6,7]
398 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
399 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm5[1],xmm8[1]
400 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
401 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm3
402 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm1[0],ymm3[0],ymm1[2],ymm3[2]
403 ; AVX-NEXT: vmovaps 64(%rdi), %xmm8
404 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm6[0],xmm8[0]
405 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
406 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
407 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm6[1],xmm8[1]
408 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
409 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm3
410 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
411 ; AVX-NEXT: vmovaps 80(%rdi), %xmm8
412 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm7[0],xmm8[0]
413 ; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
414 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
415 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm7[1],xmm8[1]
416 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
417 ; AVX-NEXT: vmovaps %ymm4, (%rsi)
418 ; AVX-NEXT: vmovaps %ymm2, (%rdx)
419 ; AVX-NEXT: vmovaps %ymm5, (%rcx)
420 ; AVX-NEXT: vmovaps %ymm1, (%r8)
421 ; AVX-NEXT: vmovaps %ymm6, (%r9)
422 ; AVX-NEXT: vmovaps %ymm0, (%rax)
423 ; AVX-NEXT: vzeroupper
426 ; AVX2-LABEL: load_i64_stride6_vf4:
428 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
429 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm0
430 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm1
431 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm2
432 ; AVX2-NEXT: vmovaps (%rdi), %xmm3
433 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm4
434 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm5
435 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm6
436 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
437 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
438 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
439 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
440 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm8
441 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
442 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
443 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
444 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm6
445 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
446 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm8
447 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
448 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
449 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
450 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
451 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
452 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
453 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm4
454 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
455 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
456 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
457 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
458 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm8
459 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
460 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
461 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
462 ; AVX2-NEXT: vmovaps %ymm7, (%rsi)
463 ; AVX2-NEXT: vmovaps %ymm3, (%rdx)
464 ; AVX2-NEXT: vmovaps %ymm6, (%rcx)
465 ; AVX2-NEXT: vmovaps %ymm2, (%r8)
466 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
467 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
468 ; AVX2-NEXT: vzeroupper
471 ; AVX2-FP-LABEL: load_i64_stride6_vf4:
473 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
474 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm0
475 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm1
476 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm2
477 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm3
478 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm4
479 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm5
480 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm6
481 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
482 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
483 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
484 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
485 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm8
486 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
487 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
488 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
489 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm6
490 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
491 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm8
492 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
493 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
494 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
495 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
496 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
497 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
498 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm4
499 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
500 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
501 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
502 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
503 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm8
504 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
505 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
506 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
507 ; AVX2-FP-NEXT: vmovaps %ymm7, (%rsi)
508 ; AVX2-FP-NEXT: vmovaps %ymm3, (%rdx)
509 ; AVX2-FP-NEXT: vmovaps %ymm6, (%rcx)
510 ; AVX2-FP-NEXT: vmovaps %ymm2, (%r8)
511 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
512 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
513 ; AVX2-FP-NEXT: vzeroupper
516 ; AVX2-FCP-LABEL: load_i64_stride6_vf4:
518 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
519 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm0
520 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm1
521 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm2
522 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm3
523 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm4
524 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm5
525 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm6
526 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm6[0]
527 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
528 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
529 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
530 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm8
531 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm1[1],ymm8[3],ymm1[3]
532 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
533 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm8[4,5,6,7]
534 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm6
535 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
536 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm8
537 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm4[0],xmm8[0]
538 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
539 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
540 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
541 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
542 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
543 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm4
544 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm8 = xmm5[0],xmm4[0]
545 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
546 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
547 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
548 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm8
549 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm0[1],ymm8[3],ymm0[3]
550 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
551 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
552 ; AVX2-FCP-NEXT: vmovaps %ymm7, (%rsi)
553 ; AVX2-FCP-NEXT: vmovaps %ymm3, (%rdx)
554 ; AVX2-FCP-NEXT: vmovaps %ymm6, (%rcx)
555 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
556 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
557 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
558 ; AVX2-FCP-NEXT: vzeroupper
559 ; AVX2-FCP-NEXT: retq
561 ; AVX512-LABEL: load_i64_stride6_vf4:
563 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
564 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm2
565 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm3
566 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
567 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
568 ; AVX512-NEXT: vpbroadcastq 144(%rdi), %ymm1
569 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
570 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
571 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
572 ; AVX512-NEXT: vmovdqa 128(%rdi), %ymm4
573 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
574 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
575 ; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
576 ; AVX512-NEXT: vmovdqa 160(%rdi), %xmm6
577 ; AVX512-NEXT: vpbroadcastq %xmm6, %ymm7
578 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
579 ; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
580 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
581 ; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
582 ; AVX512-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
583 ; AVX512-NEXT: vmovdqa 160(%rdi), %ymm7
584 ; AVX512-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
585 ; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
586 ; AVX512-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
587 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
588 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
589 ; AVX512-NEXT: vpbroadcastq 136(%rdi), %ymm8
590 ; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
591 ; AVX512-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
592 ; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
593 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
594 ; AVX512-NEXT: vmovdqa %ymm0, (%rsi)
595 ; AVX512-NEXT: vmovdqa %ymm1, (%rdx)
596 ; AVX512-NEXT: vmovdqa %ymm5, (%rcx)
597 ; AVX512-NEXT: vmovdqa %ymm6, (%r8)
598 ; AVX512-NEXT: vmovdqa %ymm4, (%r9)
599 ; AVX512-NEXT: vmovdqa %ymm2, (%rax)
600 ; AVX512-NEXT: vzeroupper
603 ; AVX512-FCP-LABEL: load_i64_stride6_vf4:
604 ; AVX512-FCP: # %bb.0:
605 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
606 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
607 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
608 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
609 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
610 ; AVX512-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
611 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
612 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
613 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
614 ; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
615 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
616 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
617 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
618 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
619 ; AVX512-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
620 ; AVX512-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
621 ; AVX512-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
622 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
623 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
624 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
625 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
626 ; AVX512-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
627 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
628 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
629 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
630 ; AVX512-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
631 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
632 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
633 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
634 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
635 ; AVX512-FCP-NEXT: vmovdqa %ymm0, (%rsi)
636 ; AVX512-FCP-NEXT: vmovdqa %ymm1, (%rdx)
637 ; AVX512-FCP-NEXT: vmovdqa %ymm6, (%rcx)
638 ; AVX512-FCP-NEXT: vmovdqa %ymm5, (%r8)
639 ; AVX512-FCP-NEXT: vmovdqa %ymm4, (%r9)
640 ; AVX512-FCP-NEXT: vmovdqa %ymm2, (%rax)
641 ; AVX512-FCP-NEXT: vzeroupper
642 ; AVX512-FCP-NEXT: retq
644 ; AVX512DQ-LABEL: load_i64_stride6_vf4:
646 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
647 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm2
648 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm3
649 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
650 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
651 ; AVX512DQ-NEXT: vpbroadcastq 144(%rdi), %ymm1
652 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
653 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
654 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
655 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %ymm4
656 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
657 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
658 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
659 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %xmm6
660 ; AVX512DQ-NEXT: vpbroadcastq %xmm6, %ymm7
661 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
662 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
663 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
664 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
665 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
666 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %ymm7
667 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
668 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
669 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
670 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
671 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
672 ; AVX512DQ-NEXT: vpbroadcastq 136(%rdi), %ymm8
673 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
674 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
675 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
676 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
677 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%rsi)
678 ; AVX512DQ-NEXT: vmovdqa %ymm1, (%rdx)
679 ; AVX512DQ-NEXT: vmovdqa %ymm5, (%rcx)
680 ; AVX512DQ-NEXT: vmovdqa %ymm6, (%r8)
681 ; AVX512DQ-NEXT: vmovdqa %ymm4, (%r9)
682 ; AVX512DQ-NEXT: vmovdqa %ymm2, (%rax)
683 ; AVX512DQ-NEXT: vzeroupper
684 ; AVX512DQ-NEXT: retq
686 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf4:
687 ; AVX512DQ-FCP: # %bb.0:
688 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
689 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
690 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
691 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
692 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
693 ; AVX512DQ-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
694 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
695 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
696 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
697 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
698 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
699 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
700 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
701 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
702 ; AVX512DQ-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
703 ; AVX512DQ-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
704 ; AVX512DQ-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
705 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
706 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
707 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
708 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
709 ; AVX512DQ-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
710 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
711 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
712 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
713 ; AVX512DQ-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
714 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
715 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
716 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
717 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
718 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm0, (%rsi)
719 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm1, (%rdx)
720 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm6, (%rcx)
721 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm5, (%r8)
722 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm4, (%r9)
723 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm2, (%rax)
724 ; AVX512DQ-FCP-NEXT: vzeroupper
725 ; AVX512DQ-FCP-NEXT: retq
727 ; AVX512BW-LABEL: load_i64_stride6_vf4:
729 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
730 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
731 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm3
732 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
733 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
734 ; AVX512BW-NEXT: vpbroadcastq 144(%rdi), %ymm1
735 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
736 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
737 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
738 ; AVX512BW-NEXT: vmovdqa 128(%rdi), %ymm4
739 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
740 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
741 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
742 ; AVX512BW-NEXT: vmovdqa 160(%rdi), %xmm6
743 ; AVX512BW-NEXT: vpbroadcastq %xmm6, %ymm7
744 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
745 ; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
746 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
747 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
748 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
749 ; AVX512BW-NEXT: vmovdqa 160(%rdi), %ymm7
750 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
751 ; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
752 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
753 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
754 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
755 ; AVX512BW-NEXT: vpbroadcastq 136(%rdi), %ymm8
756 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
757 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
758 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
759 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
760 ; AVX512BW-NEXT: vmovdqa %ymm0, (%rsi)
761 ; AVX512BW-NEXT: vmovdqa %ymm1, (%rdx)
762 ; AVX512BW-NEXT: vmovdqa %ymm5, (%rcx)
763 ; AVX512BW-NEXT: vmovdqa %ymm6, (%r8)
764 ; AVX512BW-NEXT: vmovdqa %ymm4, (%r9)
765 ; AVX512BW-NEXT: vmovdqa %ymm2, (%rax)
766 ; AVX512BW-NEXT: vzeroupper
767 ; AVX512BW-NEXT: retq
769 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf4:
770 ; AVX512BW-FCP: # %bb.0:
771 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
772 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
773 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
774 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
775 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
776 ; AVX512BW-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
777 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
778 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
779 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
780 ; AVX512BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
781 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
782 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
783 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
784 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
785 ; AVX512BW-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
786 ; AVX512BW-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
787 ; AVX512BW-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
788 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
789 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
790 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
791 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
792 ; AVX512BW-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
793 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
794 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
795 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
796 ; AVX512BW-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
797 ; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
798 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
799 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
800 ; AVX512BW-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
801 ; AVX512BW-FCP-NEXT: vmovdqa %ymm0, (%rsi)
802 ; AVX512BW-FCP-NEXT: vmovdqa %ymm1, (%rdx)
803 ; AVX512BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
804 ; AVX512BW-FCP-NEXT: vmovdqa %ymm5, (%r8)
805 ; AVX512BW-FCP-NEXT: vmovdqa %ymm4, (%r9)
806 ; AVX512BW-FCP-NEXT: vmovdqa %ymm2, (%rax)
807 ; AVX512BW-FCP-NEXT: vzeroupper
808 ; AVX512BW-FCP-NEXT: retq
810 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf4:
811 ; AVX512DQ-BW: # %bb.0:
812 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
813 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm2
814 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm3
815 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
816 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
817 ; AVX512DQ-BW-NEXT: vpbroadcastq 144(%rdi), %ymm1
818 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
819 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
820 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
821 ; AVX512DQ-BW-NEXT: vmovdqa 128(%rdi), %ymm4
822 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
823 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
824 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
825 ; AVX512DQ-BW-NEXT: vmovdqa 160(%rdi), %xmm6
826 ; AVX512DQ-BW-NEXT: vpbroadcastq %xmm6, %ymm7
827 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm7[6,7]
828 ; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
829 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [11,1,7,0]
830 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm7
831 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
832 ; AVX512DQ-BW-NEXT: vmovdqa 160(%rdi), %ymm7
833 ; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
834 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,3]
835 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [4,10]
836 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
837 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
838 ; AVX512DQ-BW-NEXT: vpbroadcastq 136(%rdi), %ymm8
839 ; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
840 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
841 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
842 ; AVX512DQ-BW-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
843 ; AVX512DQ-BW-NEXT: vmovdqa %ymm0, (%rsi)
844 ; AVX512DQ-BW-NEXT: vmovdqa %ymm1, (%rdx)
845 ; AVX512DQ-BW-NEXT: vmovdqa %ymm5, (%rcx)
846 ; AVX512DQ-BW-NEXT: vmovdqa %ymm6, (%r8)
847 ; AVX512DQ-BW-NEXT: vmovdqa %ymm4, (%r9)
848 ; AVX512DQ-BW-NEXT: vmovdqa %ymm2, (%rax)
849 ; AVX512DQ-BW-NEXT: vzeroupper
850 ; AVX512DQ-BW-NEXT: retq
852 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf4:
853 ; AVX512DQ-BW-FCP: # %bb.0:
854 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
855 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
856 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm3
857 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm0 = [0,6,12,0]
858 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
859 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq 144(%rdi), %ymm1
860 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
861 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [1,7,13,0]
862 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm1
863 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
864 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
865 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm5 = [10,0,6,0]
866 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
867 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,1,2,4]
868 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 160(%rdi), %ymm7
869 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %ymm7, %ymm5, %ymm6
870 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, 160(%rdi), %ymm0, %ymm5
871 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [11,1,7,0]
872 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm8
873 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
874 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,0,0,6]
875 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %ymm7, %ymm4, %ymm8
876 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm4 = [4,10]
877 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
878 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
879 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq 136(%rdi), %ymm8
880 ; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
881 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} xmm8 = [5,11]
882 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
883 ; AVX512DQ-BW-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm7[4,5,6,7]
884 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm0, (%rsi)
885 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm1, (%rdx)
886 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
887 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm5, (%r8)
888 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm4, (%r9)
889 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm2, (%rax)
890 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
891 ; AVX512DQ-BW-FCP-NEXT: retq
892 %wide.vec = load <24 x i64>, ptr %in.vec, align 64
893 %strided.vec0 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 0, i32 6, i32 12, i32 18>
894 %strided.vec1 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 1, i32 7, i32 13, i32 19>
895 %strided.vec2 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 2, i32 8, i32 14, i32 20>
896 %strided.vec3 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 3, i32 9, i32 15, i32 21>
897 %strided.vec4 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 4, i32 10, i32 16, i32 22>
898 %strided.vec5 = shufflevector <24 x i64> %wide.vec, <24 x i64> poison, <4 x i32> <i32 5, i32 11, i32 17, i32 23>
899 store <4 x i64> %strided.vec0, ptr %out.vec0, align 64
900 store <4 x i64> %strided.vec1, ptr %out.vec1, align 64
901 store <4 x i64> %strided.vec2, ptr %out.vec2, align 64
902 store <4 x i64> %strided.vec3, ptr %out.vec3, align 64
903 store <4 x i64> %strided.vec4, ptr %out.vec4, align 64
904 store <4 x i64> %strided.vec5, ptr %out.vec5, align 64
908 define void @load_i64_stride6_vf8(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
909 ; SSE-LABEL: load_i64_stride6_vf8:
911 ; SSE-NEXT: subq $24, %rsp
912 ; SSE-NEXT: movaps 160(%rdi), %xmm10
913 ; SSE-NEXT: movaps 256(%rdi), %xmm13
914 ; SSE-NEXT: movaps 208(%rdi), %xmm5
915 ; SSE-NEXT: movaps 352(%rdi), %xmm15
916 ; SSE-NEXT: movaps 304(%rdi), %xmm6
917 ; SSE-NEXT: movaps 64(%rdi), %xmm0
918 ; SSE-NEXT: movaps (%rdi), %xmm8
919 ; SSE-NEXT: movaps 16(%rdi), %xmm7
920 ; SSE-NEXT: movaps 48(%rdi), %xmm1
921 ; SSE-NEXT: movaps 144(%rdi), %xmm2
922 ; SSE-NEXT: movaps 96(%rdi), %xmm11
923 ; SSE-NEXT: movaps 240(%rdi), %xmm3
924 ; SSE-NEXT: movaps 192(%rdi), %xmm12
925 ; SSE-NEXT: movaps 336(%rdi), %xmm4
926 ; SSE-NEXT: movaps 288(%rdi), %xmm9
927 ; SSE-NEXT: movaps %xmm9, %xmm14
928 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm4[0]
929 ; SSE-NEXT: movaps %xmm14, (%rsp) # 16-byte Spill
930 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm4[1]
931 ; SSE-NEXT: movaps %xmm12, %xmm4
932 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
933 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
934 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm3[1]
935 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
936 ; SSE-NEXT: movaps %xmm11, %xmm3
937 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm2[0]
938 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm2[1]
939 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
940 ; SSE-NEXT: movaps %xmm8, %xmm12
941 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm1[0]
942 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
943 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
944 ; SSE-NEXT: movaps %xmm7, %xmm2
945 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
946 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
947 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
948 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
949 ; SSE-NEXT: movaps %xmm6, %xmm14
950 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm15[0]
951 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm15[1]
952 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
953 ; SSE-NEXT: movaps %xmm5, %xmm15
954 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm13[0]
955 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm13[1]
956 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
957 ; SSE-NEXT: movaps 112(%rdi), %xmm13
958 ; SSE-NEXT: movaps %xmm13, %xmm7
959 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm10[0]
960 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm10[1]
961 ; SSE-NEXT: movaps 80(%rdi), %xmm0
962 ; SSE-NEXT: movaps 32(%rdi), %xmm10
963 ; SSE-NEXT: movaps %xmm10, %xmm8
964 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
965 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
966 ; SSE-NEXT: movaps 368(%rdi), %xmm0
967 ; SSE-NEXT: movaps 320(%rdi), %xmm5
968 ; SSE-NEXT: movaps %xmm5, %xmm6
969 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
970 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
971 ; SSE-NEXT: movaps 272(%rdi), %xmm0
972 ; SSE-NEXT: movaps 224(%rdi), %xmm3
973 ; SSE-NEXT: movaps %xmm3, %xmm4
974 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
975 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
976 ; SSE-NEXT: movaps 176(%rdi), %xmm0
977 ; SSE-NEXT: movaps 128(%rdi), %xmm1
978 ; SSE-NEXT: movaps %xmm1, %xmm2
979 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
980 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
981 ; SSE-NEXT: movaps %xmm11, 16(%rsi)
982 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
983 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
984 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
985 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
986 ; SSE-NEXT: movaps %xmm12, (%rsi)
987 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
988 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
989 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
990 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
991 ; SSE-NEXT: movaps %xmm9, 48(%rdx)
992 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
993 ; SSE-NEXT: movaps %xmm0, (%rdx)
994 ; SSE-NEXT: movaps %xmm7, 16(%rcx)
995 ; SSE-NEXT: movaps %xmm15, 32(%rcx)
996 ; SSE-NEXT: movaps %xmm14, 48(%rcx)
997 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
998 ; SSE-NEXT: movaps %xmm0, (%rcx)
999 ; SSE-NEXT: movaps %xmm13, 16(%r8)
1000 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1001 ; SSE-NEXT: movaps %xmm0, 32(%r8)
1002 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1003 ; SSE-NEXT: movaps %xmm0, 48(%r8)
1004 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1005 ; SSE-NEXT: movaps %xmm0, (%r8)
1006 ; SSE-NEXT: movaps %xmm2, 16(%r9)
1007 ; SSE-NEXT: movaps %xmm4, 32(%r9)
1008 ; SSE-NEXT: movaps %xmm6, 48(%r9)
1009 ; SSE-NEXT: movaps %xmm8, (%r9)
1010 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1011 ; SSE-NEXT: movaps %xmm1, 16(%rax)
1012 ; SSE-NEXT: movaps %xmm3, 32(%rax)
1013 ; SSE-NEXT: movaps %xmm5, 48(%rax)
1014 ; SSE-NEXT: movaps %xmm10, (%rax)
1015 ; SSE-NEXT: addq $24, %rsp
1018 ; AVX-LABEL: load_i64_stride6_vf8:
1020 ; AVX-NEXT: vmovaps 352(%rdi), %ymm0
1021 ; AVX-NEXT: vmovaps 288(%rdi), %ymm4
1022 ; AVX-NEXT: vmovaps 96(%rdi), %ymm7
1023 ; AVX-NEXT: vmovaps 320(%rdi), %ymm5
1024 ; AVX-NEXT: vmovaps 128(%rdi), %ymm3
1025 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm6
1026 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm3[0],ymm6[2],ymm3[2]
1027 ; AVX-NEXT: vmovaps (%rdi), %xmm9
1028 ; AVX-NEXT: vmovaps 16(%rdi), %xmm8
1029 ; AVX-NEXT: vmovaps 48(%rdi), %xmm10
1030 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm10[0]
1031 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
1032 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1033 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm11
1034 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm11[0],ymm5[0],ymm11[2],ymm5[2]
1035 ; AVX-NEXT: vmovaps 240(%rdi), %xmm12
1036 ; AVX-NEXT: vmovaps 192(%rdi), %xmm13
1037 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm13[0],xmm12[0]
1038 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm2[4,5,6,7]
1039 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1040 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm6[1],ymm3[1],ymm6[3],ymm3[3]
1041 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm9[1],xmm10[1]
1042 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm6[0,1,2,3],ymm3[4,5,6,7]
1043 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1044 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm11[1],ymm5[1],ymm11[3],ymm5[3]
1045 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm13[1],xmm12[1]
1046 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3],ymm5[4,5,6,7]
1047 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm10
1048 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1049 ; AVX-NEXT: vmovaps 64(%rdi), %xmm11
1050 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm8[0],xmm11[0]
1051 ; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
1052 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm12
1053 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
1054 ; AVX-NEXT: vmovaps 256(%rdi), %xmm13
1055 ; AVX-NEXT: vmovaps 208(%rdi), %xmm14
1056 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1057 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm15[0,1,2,3],ymm9[4,5,6,7]
1058 ; AVX-NEXT: vmovaps 160(%rdi), %ymm15
1059 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm10[1],ymm7[3],ymm10[3]
1060 ; AVX-NEXT: vmovaps 32(%rdi), %xmm10
1061 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm8[1],xmm11[1]
1062 ; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
1063 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm4[1],ymm12[1],ymm4[3],ymm12[3]
1064 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm13[1]
1065 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
1066 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm8
1067 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm8[0],ymm15[0],ymm8[2],ymm15[2]
1068 ; AVX-NEXT: vmovaps 80(%rdi), %xmm12
1069 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm10[0],xmm12[0]
1070 ; AVX-NEXT: vblendps {{.*#+}} ymm11 = ymm13[0,1,2,3],ymm11[4,5,6,7]
1071 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm13
1072 ; AVX-NEXT: vmovaps %ymm0, %ymm3
1073 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm13[0],ymm0[0],ymm13[2],ymm0[2]
1074 ; AVX-NEXT: vmovaps 272(%rdi), %xmm1
1075 ; AVX-NEXT: vmovaps 224(%rdi), %xmm0
1076 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm1[0]
1077 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
1078 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm8[1],ymm15[1],ymm8[3],ymm15[3]
1079 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
1080 ; AVX-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1081 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm13[1],ymm3[1],ymm13[3],ymm3[3]
1082 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
1083 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm10[4,5,6,7]
1084 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1085 ; AVX-NEXT: vmovaps %ymm1, 32(%rsi)
1086 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1087 ; AVX-NEXT: vmovaps %ymm1, (%rsi)
1088 ; AVX-NEXT: vmovaps %ymm5, 32(%rdx)
1089 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1090 ; AVX-NEXT: vmovaps %ymm1, (%rdx)
1091 ; AVX-NEXT: vmovaps %ymm9, 32(%rcx)
1092 ; AVX-NEXT: vmovaps %ymm6, (%rcx)
1093 ; AVX-NEXT: vmovaps %ymm4, 32(%r8)
1094 ; AVX-NEXT: vmovaps %ymm7, (%r8)
1095 ; AVX-NEXT: vmovaps %ymm2, 32(%r9)
1096 ; AVX-NEXT: vmovaps %ymm11, (%r9)
1097 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
1098 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
1099 ; AVX-NEXT: vmovaps %ymm8, (%rax)
1100 ; AVX-NEXT: vzeroupper
1103 ; AVX2-LABEL: load_i64_stride6_vf8:
1105 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm0
1106 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm2
1107 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm7
1108 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm5
1109 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm9
1110 ; AVX2-NEXT: vmovaps (%rdi), %xmm4
1111 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm11
1112 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm6
1113 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1114 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1115 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1116 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1117 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1118 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm8
1119 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm10
1120 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1121 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1122 ; AVX2-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1123 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1124 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm12
1125 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1126 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1127 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1128 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm6
1129 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1130 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1131 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1132 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm8
1133 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1134 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm12
1135 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1136 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1137 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm10
1138 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1139 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm13
1140 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm14
1141 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1142 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1143 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm15
1144 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1145 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
1146 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1147 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1148 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1149 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1150 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1151 ; AVX2-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1152 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1153 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm11
1154 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1155 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1156 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1157 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1158 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm13
1159 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm14
1160 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1161 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1162 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1163 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1164 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm2
1165 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1166 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1167 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1168 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm11
1169 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1170 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1171 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1172 ; AVX2-NEXT: vmovaps %ymm3, 32(%rsi)
1173 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1174 ; AVX2-NEXT: vmovaps %ymm3, (%rsi)
1175 ; AVX2-NEXT: vmovaps %ymm6, 32(%rdx)
1176 ; AVX2-NEXT: vmovaps %ymm4, (%rdx)
1177 ; AVX2-NEXT: vmovaps %ymm10, 32(%rcx)
1178 ; AVX2-NEXT: vmovaps %ymm8, (%rcx)
1179 ; AVX2-NEXT: vmovaps %ymm7, 32(%r8)
1180 ; AVX2-NEXT: vmovaps %ymm9, (%r8)
1181 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
1182 ; AVX2-NEXT: vmovaps %ymm5, (%r9)
1183 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
1184 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
1185 ; AVX2-NEXT: vmovaps %ymm2, (%rax)
1186 ; AVX2-NEXT: vzeroupper
1189 ; AVX2-FP-LABEL: load_i64_stride6_vf8:
1191 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm0
1192 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm2
1193 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm7
1194 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm5
1195 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm9
1196 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm4
1197 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm11
1198 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm6
1199 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1200 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1201 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1202 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1203 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1204 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm8
1205 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm10
1206 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1207 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1208 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1209 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1210 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm12
1211 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1212 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1213 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1214 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm6
1215 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1216 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1217 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1218 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm8
1219 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1220 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm12
1221 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1222 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1223 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm10
1224 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1225 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm13
1226 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm14
1227 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1228 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1229 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm15
1230 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1231 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
1232 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1233 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1234 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1235 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1236 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1237 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1238 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1239 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm11
1240 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1241 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1242 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1243 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1244 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm13
1245 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm14
1246 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1247 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1248 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1249 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1250 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm2
1251 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1252 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1253 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1254 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm11
1255 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1256 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1257 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1258 ; AVX2-FP-NEXT: vmovaps %ymm3, 32(%rsi)
1259 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1260 ; AVX2-FP-NEXT: vmovaps %ymm3, (%rsi)
1261 ; AVX2-FP-NEXT: vmovaps %ymm6, 32(%rdx)
1262 ; AVX2-FP-NEXT: vmovaps %ymm4, (%rdx)
1263 ; AVX2-FP-NEXT: vmovaps %ymm10, 32(%rcx)
1264 ; AVX2-FP-NEXT: vmovaps %ymm8, (%rcx)
1265 ; AVX2-FP-NEXT: vmovaps %ymm7, 32(%r8)
1266 ; AVX2-FP-NEXT: vmovaps %ymm9, (%r8)
1267 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
1268 ; AVX2-FP-NEXT: vmovaps %ymm5, (%r9)
1269 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1270 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
1271 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rax)
1272 ; AVX2-FP-NEXT: vzeroupper
1273 ; AVX2-FP-NEXT: retq
1275 ; AVX2-FCP-LABEL: load_i64_stride6_vf8:
1276 ; AVX2-FCP: # %bb.0:
1277 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm0
1278 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm2
1279 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm7
1280 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm5
1281 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm9
1282 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm4
1283 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm11
1284 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm6
1285 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm6[0]
1286 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm5[0],ymm9[2],ymm5[2]
1287 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
1288 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
1289 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1290 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm8
1291 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm10
1292 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm3 = xmm10[0],xmm8[0]
1293 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm7[0],ymm2[0],ymm7[2],ymm2[2]
1294 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[0,1,0,3]
1295 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
1296 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm12
1297 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
1298 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
1299 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm12[4,5,6,7]
1300 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm6
1301 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
1302 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm8[1]
1303 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
1304 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm8
1305 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
1306 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm12
1307 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm11[0],xmm12[0]
1308 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
1309 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm10
1310 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[2],ymm10[2]
1311 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm13
1312 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm14
1313 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm14[0],xmm13[0]
1314 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm15[0,1,2,3],ymm10[4,5,6,7]
1315 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm15
1316 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1317 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
1318 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm15[1],ymm9[3],ymm15[3]
1319 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,2,1]
1320 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3],ymm9[4,5,6,7]
1321 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1322 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm0[1],ymm7[3],ymm0[3]
1323 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,1,2,1]
1324 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
1325 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm11
1326 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
1327 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm15[0],ymm5[2],ymm15[2]
1328 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,0,3]
1329 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
1330 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm13
1331 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm14
1332 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
1333 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,0,3]
1334 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
1335 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1336 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm2
1337 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
1338 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm12[1],xmm11[1]
1339 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
1340 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm11
1341 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm0[1],ymm11[3],ymm0[3]
1342 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm14[1],xmm13[1]
1343 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
1344 ; AVX2-FCP-NEXT: vmovaps %ymm3, 32(%rsi)
1345 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1346 ; AVX2-FCP-NEXT: vmovaps %ymm3, (%rsi)
1347 ; AVX2-FCP-NEXT: vmovaps %ymm6, 32(%rdx)
1348 ; AVX2-FCP-NEXT: vmovaps %ymm4, (%rdx)
1349 ; AVX2-FCP-NEXT: vmovaps %ymm10, 32(%rcx)
1350 ; AVX2-FCP-NEXT: vmovaps %ymm8, (%rcx)
1351 ; AVX2-FCP-NEXT: vmovaps %ymm7, 32(%r8)
1352 ; AVX2-FCP-NEXT: vmovaps %ymm9, (%r8)
1353 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
1354 ; AVX2-FCP-NEXT: vmovaps %ymm5, (%r9)
1355 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1356 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
1357 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rax)
1358 ; AVX2-FCP-NEXT: vzeroupper
1359 ; AVX2-FCP-NEXT: retq
1361 ; AVX512-LABEL: load_i64_stride6_vf8:
1362 ; AVX512: # %bb.0:
1363 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
1364 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm6
1365 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm7
1366 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
1367 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
1368 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm3
1369 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm4
1370 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1371 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1372 ; AVX512-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1373 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1374 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1375 ; AVX512-NEXT: movb $56, %dil
1376 ; AVX512-NEXT: kmovw %edi, %k1
1377 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1378 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1379 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1380 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8
1381 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1382 ; AVX512-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1383 ; AVX512-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1384 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1385 ; AVX512-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1386 ; AVX512-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1387 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1388 ; AVX512-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1389 ; AVX512-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1390 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1391 ; AVX512-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1392 ; AVX512-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1393 ; AVX512-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1394 ; AVX512-NEXT: movb $-64, %dil
1395 ; AVX512-NEXT: kmovw %edi, %k2
1396 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1397 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1398 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1399 ; AVX512-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1400 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1401 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1402 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1403 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1404 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1405 ; AVX512-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1406 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1407 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1408 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1409 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1410 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1411 ; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1412 ; AVX512-NEXT: movb $24, %dil
1413 ; AVX512-NEXT: kmovw %edi, %k2
1414 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1415 ; AVX512-NEXT: movb $-32, %dil
1416 ; AVX512-NEXT: kmovw %edi, %k1
1417 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1418 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1419 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1420 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1421 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1422 ; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1423 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1424 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1425 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1426 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1427 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1428 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1429 ; AVX512-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1430 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1431 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1432 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1433 ; AVX512-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1434 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1435 ; AVX512-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1436 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1437 ; AVX512-NEXT: vmovdqa64 %zmm2, (%rsi)
1438 ; AVX512-NEXT: vmovdqa64 %zmm13, (%rdx)
1439 ; AVX512-NEXT: vmovdqa64 %zmm8, (%rcx)
1440 ; AVX512-NEXT: vmovdqa64 %zmm9, (%r8)
1441 ; AVX512-NEXT: vmovdqa64 %zmm5, (%r9)
1442 ; AVX512-NEXT: vmovdqa64 %zmm0, (%rax)
1443 ; AVX512-NEXT: vzeroupper
1444 ; AVX512-NEXT: retq
1446 ; AVX512-FCP-LABEL: load_i64_stride6_vf8:
1447 ; AVX512-FCP: # %bb.0:
1448 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1449 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1450 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1451 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1452 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1453 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1454 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1455 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1456 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1457 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1458 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1459 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1460 ; AVX512-FCP-NEXT: movb $56, %dil
1461 ; AVX512-FCP-NEXT: kmovw %edi, %k1
1462 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1463 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1464 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1465 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1466 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1467 ; AVX512-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1468 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1469 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1470 ; AVX512-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1471 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1472 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1473 ; AVX512-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1474 ; AVX512-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1475 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1476 ; AVX512-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1477 ; AVX512-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1478 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1479 ; AVX512-FCP-NEXT: movb $-64, %dil
1480 ; AVX512-FCP-NEXT: kmovw %edi, %k2
1481 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1482 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1483 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1484 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1485 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1486 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1487 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1488 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1489 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1490 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1491 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1492 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1493 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1494 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1495 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1496 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1497 ; AVX512-FCP-NEXT: movb $24, %dil
1498 ; AVX512-FCP-NEXT: kmovw %edi, %k2
1499 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1500 ; AVX512-FCP-NEXT: movb $-32, %dil
1501 ; AVX512-FCP-NEXT: kmovw %edi, %k1
1502 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1503 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1504 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1505 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1506 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1507 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1508 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1509 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1510 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1511 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1512 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1513 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1514 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1515 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1516 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1517 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1518 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1519 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1520 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1521 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1522 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1523 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1524 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1525 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1526 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1527 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1528 ; AVX512-FCP-NEXT: vzeroupper
1529 ; AVX512-FCP-NEXT: retq
1531 ; AVX512DQ-LABEL: load_i64_stride6_vf8:
1532 ; AVX512DQ: # %bb.0:
1533 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
1534 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm6
1535 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm7
1536 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm0
1537 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
1538 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm3
1539 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm4
1540 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1541 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1542 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1543 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1544 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1545 ; AVX512DQ-NEXT: movb $56, %dil
1546 ; AVX512DQ-NEXT: kmovw %edi, %k1
1547 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1548 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1549 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1550 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8
1551 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1552 ; AVX512DQ-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1553 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1554 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1555 ; AVX512DQ-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1556 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1557 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1558 ; AVX512DQ-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1559 ; AVX512DQ-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1560 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1561 ; AVX512DQ-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1562 ; AVX512DQ-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1563 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1564 ; AVX512DQ-NEXT: movb $-64, %dil
1565 ; AVX512DQ-NEXT: kmovw %edi, %k2
1566 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1567 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1568 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1569 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1570 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1571 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1572 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1573 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1574 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1575 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1576 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1577 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1578 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1579 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1580 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1581 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1582 ; AVX512DQ-NEXT: movb $24, %dil
1583 ; AVX512DQ-NEXT: kmovw %edi, %k2
1584 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1585 ; AVX512DQ-NEXT: movb $-32, %dil
1586 ; AVX512DQ-NEXT: kmovw %edi, %k1
1587 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1588 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1589 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1590 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1591 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1592 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1593 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1594 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1595 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1596 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1597 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1598 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1599 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1600 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1601 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1602 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1603 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1604 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1605 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1606 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1607 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, (%rsi)
1608 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, (%rdx)
1609 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, (%rcx)
1610 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%r8)
1611 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%r9)
1612 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rax)
1613 ; AVX512DQ-NEXT: vzeroupper
1614 ; AVX512DQ-NEXT: retq
1616 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf8:
1617 ; AVX512DQ-FCP: # %bb.0:
1618 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1619 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1620 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1621 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1622 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1623 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1624 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1625 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1626 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1627 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1628 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1629 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1630 ; AVX512DQ-FCP-NEXT: movb $56, %dil
1631 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
1632 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1633 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1634 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1635 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1636 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1637 ; AVX512DQ-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1638 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1639 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1640 ; AVX512DQ-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1641 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1642 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1643 ; AVX512DQ-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1644 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1645 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1646 ; AVX512DQ-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1647 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1648 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1649 ; AVX512DQ-FCP-NEXT: movb $-64, %dil
1650 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
1651 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1652 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1653 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1654 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1655 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1656 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1657 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1658 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1659 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1660 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1661 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1662 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1663 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1664 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1665 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1666 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1667 ; AVX512DQ-FCP-NEXT: movb $24, %dil
1668 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
1669 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1670 ; AVX512DQ-FCP-NEXT: movb $-32, %dil
1671 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
1672 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1673 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1674 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1675 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1676 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1677 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1678 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1679 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1680 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1681 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1682 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1683 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1684 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1685 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1686 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1687 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1688 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1689 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1690 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1691 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1692 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1693 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1694 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1695 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1696 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1697 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1698 ; AVX512DQ-FCP-NEXT: vzeroupper
1699 ; AVX512DQ-FCP-NEXT: retq
1701 ; AVX512BW-LABEL: load_i64_stride6_vf8:
1702 ; AVX512BW: # %bb.0:
1703 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1704 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm6
1705 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm7
1706 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1707 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1708 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm3
1709 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm4
1710 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1711 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1712 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1713 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1714 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1715 ; AVX512BW-NEXT: movb $56, %dil
1716 ; AVX512BW-NEXT: kmovd %edi, %k1
1717 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1718 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1719 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1720 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
1721 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1722 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1723 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1724 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1725 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1726 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1727 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1728 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1729 ; AVX512BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1730 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1731 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1732 ; AVX512BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1733 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1734 ; AVX512BW-NEXT: movb $-64, %dil
1735 ; AVX512BW-NEXT: kmovd %edi, %k2
1736 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1737 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1738 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1739 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1740 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1741 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1742 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1743 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1744 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1745 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1746 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1747 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1748 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1749 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1750 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1751 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1752 ; AVX512BW-NEXT: movb $24, %dil
1753 ; AVX512BW-NEXT: kmovd %edi, %k2
1754 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1755 ; AVX512BW-NEXT: movb $-32, %dil
1756 ; AVX512BW-NEXT: kmovd %edi, %k1
1757 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1758 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1759 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1760 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1761 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1762 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1763 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1764 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1765 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1766 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1767 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1768 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1769 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1770 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1771 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1772 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1773 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1774 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1775 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1776 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1777 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%rsi)
1778 ; AVX512BW-NEXT: vmovdqa64 %zmm13, (%rdx)
1779 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%rcx)
1780 ; AVX512BW-NEXT: vmovdqa64 %zmm9, (%r8)
1781 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r9)
1782 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rax)
1783 ; AVX512BW-NEXT: vzeroupper
1784 ; AVX512BW-NEXT: retq
1786 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf8:
1787 ; AVX512BW-FCP: # %bb.0:
1788 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1789 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1790 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1791 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1792 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1793 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1794 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1795 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1796 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1797 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1798 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1799 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1800 ; AVX512BW-FCP-NEXT: movb $56, %dil
1801 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
1802 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1803 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1804 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1805 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1806 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1807 ; AVX512BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1808 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1809 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1810 ; AVX512BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1811 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1812 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1813 ; AVX512BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1814 ; AVX512BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1815 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1816 ; AVX512BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1817 ; AVX512BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1818 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1819 ; AVX512BW-FCP-NEXT: movb $-64, %dil
1820 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
1821 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1822 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1823 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1824 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1825 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1826 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1827 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1828 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1829 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1830 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1831 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1832 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1833 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1834 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1835 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1836 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1837 ; AVX512BW-FCP-NEXT: movb $24, %dil
1838 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
1839 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1840 ; AVX512BW-FCP-NEXT: movb $-32, %dil
1841 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
1842 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1843 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1844 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1845 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1846 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1847 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1848 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1849 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1850 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1851 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1852 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1853 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1854 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1855 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1856 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1857 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1858 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1859 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1860 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1861 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1862 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
1863 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
1864 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
1865 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
1866 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
1867 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
1868 ; AVX512BW-FCP-NEXT: vzeroupper
1869 ; AVX512BW-FCP-NEXT: retq
1871 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf8:
1872 ; AVX512DQ-BW: # %bb.0:
1873 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1874 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm6
1875 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm7
1876 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
1877 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1878 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm3
1879 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm4
1880 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1881 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1882 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1883 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1884 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1885 ; AVX512DQ-BW-NEXT: movb $56, %dil
1886 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
1887 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1888 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1889 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1890 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8
1891 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1892 ; AVX512DQ-BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1893 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1894 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1895 ; AVX512DQ-BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1896 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1897 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1898 ; AVX512DQ-BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1899 ; AVX512DQ-BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1900 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1901 ; AVX512DQ-BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1902 ; AVX512DQ-BW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1903 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1904 ; AVX512DQ-BW-NEXT: movb $-64, %dil
1905 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
1906 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1907 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1908 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1909 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1910 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1911 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1912 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1913 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1914 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1915 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
1916 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
1917 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
1918 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1919 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1920 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
1921 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
1922 ; AVX512DQ-BW-NEXT: movb $24, %dil
1923 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
1924 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
1925 ; AVX512DQ-BW-NEXT: movb $-32, %dil
1926 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
1927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
1928 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
1929 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1930 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1931 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
1932 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1933 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
1934 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
1935 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
1936 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1937 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1938 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
1939 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
1940 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
1941 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
1942 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1943 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
1944 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
1945 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
1946 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
1947 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%rsi)
1948 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, (%rdx)
1949 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, (%rcx)
1950 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, (%r8)
1951 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%r9)
1952 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, (%rax)
1953 ; AVX512DQ-BW-NEXT: vzeroupper
1954 ; AVX512DQ-BW-NEXT: retq
1956 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf8:
1957 ; AVX512DQ-BW-FCP: # %bb.0:
1958 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1959 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
1960 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
1961 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1962 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1963 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
1964 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
1965 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,6,0,10,0,6,0,10]
1966 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1967 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm5
1968 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [0,6,12,0]
1969 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
1970 ; AVX512DQ-BW-FCP-NEXT: movb $56, %dil
1971 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
1972 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm2 {%k1}
1973 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
1974 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1975 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
1976 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,6,12,0,0,6,12]
1977 ; AVX512DQ-BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1978 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1979 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,1,7,13,0,1,7,13]
1980 ; AVX512DQ-BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1981 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm7, %zmm10
1982 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,10,0,6,0,10,0,6]
1983 ; AVX512DQ-BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1984 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1985 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,11,1,7,0,11,1,7]
1986 ; AVX512DQ-BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1987 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
1988 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm7
1989 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %dil
1990 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
1991 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2 {%k2}
1992 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,7,0,11,1,7,0,11]
1993 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1994 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm4, %zmm7
1995 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm13 = [1,7,13,0]
1996 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1997 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13 {%k1}
1998 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [5,11,5,11,5,11,5,11]
1999 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2000 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
2001 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13 {%k2}
2002 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [10,4,10,4,10,4,10,4]
2003 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2004 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2005 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [10,0,6,0]
2006 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
2007 ; AVX512DQ-BW-FCP-NEXT: movb $24, %dil
2008 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
2009 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8 {%k2}
2010 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %dil
2011 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
2012 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8 {%k1}
2013 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [11,5,11,5,11,5,11,5]
2014 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2015 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2016 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [11,1,7,0]
2017 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
2018 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm9 {%k2}
2019 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
2020 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [12,0,0,6,12,0,0,6]
2021 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2022 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2023 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm5
2024 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm6, %zmm5
2025 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
2026 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [13,0,1,7,13,0,1,7]
2027 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2028 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm3, %zmm6
2029 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm0
2030 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm0, %zmm6, %zmm0
2031 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm0 {%k1}
2032 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%rsi)
2033 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
2034 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%rcx)
2035 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, (%r8)
2036 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
2037 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
2038 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
2039 ; AVX512DQ-BW-FCP-NEXT: retq
2040 %wide.vec = load <48 x i64>, ptr %in.vec, align 64
2041 %strided.vec0 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42>
2042 %strided.vec1 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43>
2043 %strided.vec2 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44>
2044 %strided.vec3 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45>
2045 %strided.vec4 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46>
2046 %strided.vec5 = shufflevector <48 x i64> %wide.vec, <48 x i64> poison, <8 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47>
2047 store <8 x i64> %strided.vec0, ptr %out.vec0, align 64
2048 store <8 x i64> %strided.vec1, ptr %out.vec1, align 64
2049 store <8 x i64> %strided.vec2, ptr %out.vec2, align 64
2050 store <8 x i64> %strided.vec3, ptr %out.vec3, align 64
2051 store <8 x i64> %strided.vec4, ptr %out.vec4, align 64
2052 store <8 x i64> %strided.vec5, ptr %out.vec5, align 64
2056 define void @load_i64_stride6_vf16(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
2057 ; SSE-LABEL: load_i64_stride6_vf16:
2059 ; SSE-NEXT: subq $408, %rsp # imm = 0x198
2060 ; SSE-NEXT: movaps (%rdi), %xmm7
2061 ; SSE-NEXT: movaps 624(%rdi), %xmm0
2062 ; SSE-NEXT: movaps 576(%rdi), %xmm8
2063 ; SSE-NEXT: movaps 240(%rdi), %xmm1
2064 ; SSE-NEXT: movaps 192(%rdi), %xmm9
2065 ; SSE-NEXT: movaps 720(%rdi), %xmm2
2066 ; SSE-NEXT: movaps 672(%rdi), %xmm10
2067 ; SSE-NEXT: movaps 336(%rdi), %xmm3
2068 ; SSE-NEXT: movaps 288(%rdi), %xmm11
2069 ; SSE-NEXT: movaps 432(%rdi), %xmm4
2070 ; SSE-NEXT: movaps 384(%rdi), %xmm13
2071 ; SSE-NEXT: movaps 528(%rdi), %xmm5
2072 ; SSE-NEXT: movaps 480(%rdi), %xmm12
2073 ; SSE-NEXT: movaps 144(%rdi), %xmm6
2074 ; SSE-NEXT: movaps 96(%rdi), %xmm14
2075 ; SSE-NEXT: movaps %xmm14, %xmm15
2076 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm6[0]
2077 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2078 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm6[1]
2079 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2080 ; SSE-NEXT: movaps %xmm12, %xmm6
2081 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
2082 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2083 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm5[1]
2084 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2085 ; SSE-NEXT: movaps %xmm13, %xmm5
2086 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
2087 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2088 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm4[1]
2089 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2090 ; SSE-NEXT: movaps %xmm11, %xmm4
2091 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
2092 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2093 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
2094 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2095 ; SSE-NEXT: movaps %xmm10, %xmm3
2096 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
2097 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2098 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm2[1]
2099 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2100 ; SSE-NEXT: movaps %xmm9, %xmm2
2101 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
2102 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2103 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm1[1]
2104 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2105 ; SSE-NEXT: movaps %xmm8, %xmm1
2106 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2107 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2108 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
2109 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2110 ; SSE-NEXT: movaps 48(%rdi), %xmm0
2111 ; SSE-NEXT: movaps %xmm7, %xmm1
2112 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2113 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2114 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
2115 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2116 ; SSE-NEXT: movaps 64(%rdi), %xmm0
2117 ; SSE-NEXT: movaps 16(%rdi), %xmm1
2118 ; SSE-NEXT: movaps %xmm1, %xmm2
2119 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2120 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2121 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2122 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2123 ; SSE-NEXT: movaps 160(%rdi), %xmm0
2124 ; SSE-NEXT: movaps 112(%rdi), %xmm1
2125 ; SSE-NEXT: movaps %xmm1, %xmm2
2126 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2127 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2128 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2129 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2130 ; SSE-NEXT: movaps 256(%rdi), %xmm0
2131 ; SSE-NEXT: movaps 208(%rdi), %xmm1
2132 ; SSE-NEXT: movaps %xmm1, %xmm2
2133 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2134 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2135 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2136 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2137 ; SSE-NEXT: movaps 352(%rdi), %xmm0
2138 ; SSE-NEXT: movaps 304(%rdi), %xmm1
2139 ; SSE-NEXT: movaps %xmm1, %xmm2
2140 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2141 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2142 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2143 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2144 ; SSE-NEXT: movaps 448(%rdi), %xmm0
2145 ; SSE-NEXT: movaps 400(%rdi), %xmm1
2146 ; SSE-NEXT: movaps %xmm1, %xmm2
2147 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2148 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2149 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2150 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
2151 ; SSE-NEXT: movaps 544(%rdi), %xmm0
2152 ; SSE-NEXT: movaps 496(%rdi), %xmm1
2153 ; SSE-NEXT: movaps %xmm1, %xmm2
2154 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2155 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2156 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2157 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2158 ; SSE-NEXT: movaps 640(%rdi), %xmm0
2159 ; SSE-NEXT: movaps 592(%rdi), %xmm15
2160 ; SSE-NEXT: movaps %xmm15, %xmm1
2161 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2162 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2163 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
2164 ; SSE-NEXT: movaps 736(%rdi), %xmm0
2165 ; SSE-NEXT: movaps 688(%rdi), %xmm11
2166 ; SSE-NEXT: movaps %xmm11, %xmm1
2167 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2168 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2169 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
2170 ; SSE-NEXT: movaps 80(%rdi), %xmm0
2171 ; SSE-NEXT: movaps 32(%rdi), %xmm1
2172 ; SSE-NEXT: movaps %xmm1, %xmm2
2173 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2174 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2175 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2176 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2177 ; SSE-NEXT: movaps 176(%rdi), %xmm0
2178 ; SSE-NEXT: movaps 128(%rdi), %xmm13
2179 ; SSE-NEXT: movaps %xmm13, %xmm1
2180 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2181 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2182 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
2183 ; SSE-NEXT: movaps 272(%rdi), %xmm0
2184 ; SSE-NEXT: movaps 224(%rdi), %xmm12
2185 ; SSE-NEXT: movaps %xmm12, %xmm14
2186 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
2187 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
2188 ; SSE-NEXT: movaps 368(%rdi), %xmm0
2189 ; SSE-NEXT: movaps 320(%rdi), %xmm9
2190 ; SSE-NEXT: movaps %xmm9, %xmm10
2191 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
2192 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
2193 ; SSE-NEXT: movaps 464(%rdi), %xmm0
2194 ; SSE-NEXT: movaps 416(%rdi), %xmm5
2195 ; SSE-NEXT: movaps %xmm5, %xmm8
2196 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
2197 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
2198 ; SSE-NEXT: movaps 560(%rdi), %xmm0
2199 ; SSE-NEXT: movaps 512(%rdi), %xmm6
2200 ; SSE-NEXT: movaps %xmm6, %xmm7
2201 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm0[0]
2202 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm0[1]
2203 ; SSE-NEXT: movaps 656(%rdi), %xmm0
2204 ; SSE-NEXT: movaps 608(%rdi), %xmm3
2205 ; SSE-NEXT: movaps %xmm3, %xmm4
2206 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
2207 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
2208 ; SSE-NEXT: movaps 752(%rdi), %xmm0
2209 ; SSE-NEXT: movaps 704(%rdi), %xmm1
2210 ; SSE-NEXT: movaps %xmm1, %xmm2
2211 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2212 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2214 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
2215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2216 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
2217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2218 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
2219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2220 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
2221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2222 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
2223 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2224 ; SSE-NEXT: movaps %xmm0, (%rsi)
2225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2226 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
2227 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2228 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
2229 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2230 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
2231 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2232 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
2233 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2234 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
2235 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2236 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
2237 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2238 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
2239 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2240 ; SSE-NEXT: movaps %xmm0, (%rdx)
2241 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2242 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
2243 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2244 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
2245 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2246 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
2247 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2248 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
2249 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2250 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
2251 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2252 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
2253 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2254 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
2255 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2256 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
2257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2258 ; SSE-NEXT: movaps %xmm0, (%rcx)
2259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2260 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
2261 ; SSE-NEXT: movaps %xmm11, 112(%r8)
2262 ; SSE-NEXT: movaps %xmm15, 96(%r8)
2263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2264 ; SSE-NEXT: movaps %xmm0, 80(%r8)
2265 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2266 ; SSE-NEXT: movaps %xmm0, 64(%r8)
2267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2268 ; SSE-NEXT: movaps %xmm0, 48(%r8)
2269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2270 ; SSE-NEXT: movaps %xmm0, 32(%r8)
2271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2272 ; SSE-NEXT: movaps %xmm0, 16(%r8)
2273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2274 ; SSE-NEXT: movaps %xmm0, (%r8)
2275 ; SSE-NEXT: movaps %xmm2, 112(%r9)
2276 ; SSE-NEXT: movaps %xmm4, 96(%r9)
2277 ; SSE-NEXT: movaps %xmm7, 80(%r9)
2278 ; SSE-NEXT: movaps %xmm8, 64(%r9)
2279 ; SSE-NEXT: movaps %xmm10, 48(%r9)
2280 ; SSE-NEXT: movaps %xmm14, 32(%r9)
2281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2282 ; SSE-NEXT: movaps %xmm0, 16(%r9)
2283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2284 ; SSE-NEXT: movaps %xmm0, (%r9)
2285 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2286 ; SSE-NEXT: movaps %xmm1, 112(%rax)
2287 ; SSE-NEXT: movaps %xmm3, 96(%rax)
2288 ; SSE-NEXT: movaps %xmm6, 80(%rax)
2289 ; SSE-NEXT: movaps %xmm5, 64(%rax)
2290 ; SSE-NEXT: movaps %xmm9, 48(%rax)
2291 ; SSE-NEXT: movaps %xmm12, 32(%rax)
2292 ; SSE-NEXT: movaps %xmm13, 16(%rax)
2293 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2294 ; SSE-NEXT: movaps %xmm0, (%rax)
2295 ; SSE-NEXT: addq $408, %rsp # imm = 0x198
2298 ; AVX-LABEL: load_i64_stride6_vf16:
2300 ; AVX-NEXT: subq $552, %rsp # imm = 0x228
2301 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
2302 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2303 ; AVX-NEXT: vmovaps 512(%rdi), %ymm8
2304 ; AVX-NEXT: vmovups %ymm8, (%rsp) # 32-byte Spill
2305 ; AVX-NEXT: vmovaps 128(%rdi), %ymm2
2306 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm3
2307 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
2308 ; AVX-NEXT: vmovaps (%rdi), %xmm5
2309 ; AVX-NEXT: vmovaps 48(%rdi), %xmm6
2310 ; AVX-NEXT: vmovlhps {{.*#+}} xmm7 = xmm5[0],xmm6[0]
2311 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm4[4,5,6,7]
2312 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2313 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm4
2314 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm4[0],ymm8[0],ymm4[2],ymm8[2]
2315 ; AVX-NEXT: vmovaps 432(%rdi), %xmm8
2316 ; AVX-NEXT: vmovaps 384(%rdi), %xmm9
2317 ; AVX-NEXT: vmovlhps {{.*#+}} xmm10 = xmm9[0],xmm8[0]
2318 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm7[4,5,6,7]
2319 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2320 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm7
2321 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
2322 ; AVX-NEXT: vmovaps 240(%rdi), %xmm11
2323 ; AVX-NEXT: vmovaps 192(%rdi), %xmm12
2324 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm12[0],xmm11[0]
2325 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm10[4,5,6,7]
2326 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2327 ; AVX-NEXT: vmovaps 704(%rdi), %ymm10
2328 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm13
2329 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm13[0],ymm10[0],ymm13[2],ymm10[2]
2330 ; AVX-NEXT: vmovaps 624(%rdi), %xmm15
2331 ; AVX-NEXT: vmovaps 576(%rdi), %xmm0
2332 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm15[0]
2333 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
2334 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2335 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
2336 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm6[1]
2337 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2338 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2339 ; AVX-NEXT: vunpckhpd (%rsp), %ymm4, %ymm1 # 32-byte Folded Reload
2340 ; AVX-NEXT: # ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
2341 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
2342 ; AVX-NEXT: vmovaps 480(%rdi), %ymm3
2343 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2344 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2345 ; AVX-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
2346 ; AVX-NEXT: vmovaps 96(%rdi), %ymm2
2347 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2348 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm4 # 32-byte Folded Reload
2349 ; AVX-NEXT: # ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
2350 ; AVX-NEXT: vmovaps 16(%rdi), %xmm6
2351 ; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2352 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm12[1],xmm11[1]
2353 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2354 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2355 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm13[1],ymm10[1],ymm13[3],ymm10[3]
2356 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
2357 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2358 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2359 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm14
2360 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm14[0],ymm2[2],ymm14[2]
2361 ; AVX-NEXT: vmovaps 64(%rdi), %xmm13
2362 ; AVX-NEXT: vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm13[0]
2363 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm4[4,5,6,7]
2364 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2365 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm11
2366 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm3[0],ymm11[0],ymm3[2],ymm11[2]
2367 ; AVX-NEXT: vmovaps 448(%rdi), %xmm7
2368 ; AVX-NEXT: vmovaps 400(%rdi), %xmm8
2369 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm8[0],xmm7[0]
2370 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm6[4,5,6,7]
2371 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2372 ; AVX-NEXT: vmovaps 288(%rdi), %ymm6
2373 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm15
2374 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm6[0],ymm15[0],ymm6[2],ymm15[2]
2375 ; AVX-NEXT: vmovaps 256(%rdi), %xmm4
2376 ; AVX-NEXT: vmovaps 208(%rdi), %xmm3
2377 ; AVX-NEXT: vmovlhps {{.*#+}} xmm12 = xmm3[0],xmm4[0]
2378 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm10[4,5,6,7]
2379 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2380 ; AVX-NEXT: vmovaps 672(%rdi), %ymm5
2381 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm2
2382 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm5[0],ymm2[0],ymm5[2],ymm2[2]
2383 ; AVX-NEXT: vmovaps 640(%rdi), %xmm1
2384 ; AVX-NEXT: vmovaps 592(%rdi), %xmm0
2385 ; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm0[0],xmm1[0]
2386 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm12[4,5,6,7]
2387 ; AVX-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2388 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2389 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm14[1],ymm9[3],ymm14[3]
2390 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2391 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm10[1],xmm13[1]
2392 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm13[0,1,2,3],ymm9[4,5,6,7]
2393 ; AVX-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2394 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2395 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm9[1],ymm11[1],ymm9[3],ymm11[3]
2396 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm8[1],xmm7[1]
2397 ; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm11[4,5,6,7]
2398 ; AVX-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2399 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
2400 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
2401 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2402 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2403 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm6[1],ymm15[1],ymm6[3],ymm15[3]
2404 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm4[1]
2405 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2406 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2407 ; AVX-NEXT: vmovaps 544(%rdi), %ymm0
2408 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2409 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm1
2410 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2411 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
2412 ; AVX-NEXT: vmovaps 464(%rdi), %xmm14
2413 ; AVX-NEXT: vmovaps 416(%rdi), %xmm13
2414 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm13[0],xmm14[0]
2415 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2416 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2417 ; AVX-NEXT: vmovaps 160(%rdi), %ymm11
2418 ; AVX-NEXT: vmovaps 32(%rdi), %xmm12
2419 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm10
2420 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
2421 ; AVX-NEXT: vmovaps 80(%rdi), %xmm9
2422 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm12[0],xmm9[0]
2423 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2424 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2425 ; AVX-NEXT: vmovaps 352(%rdi), %ymm8
2426 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm7
2427 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
2428 ; AVX-NEXT: vmovaps 272(%rdi), %xmm6
2429 ; AVX-NEXT: vmovaps 224(%rdi), %xmm3
2430 ; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm3[0],xmm6[0]
2431 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm4[4,5,6,7]
2432 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2433 ; AVX-NEXT: vmovaps 736(%rdi), %ymm5
2434 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm2
2435 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm5[0],ymm2[2],ymm5[2]
2436 ; AVX-NEXT: vmovaps 656(%rdi), %xmm1
2437 ; AVX-NEXT: vmovaps 608(%rdi), %xmm0
2438 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm0[0],xmm1[0]
2439 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm15[0,1,2,3],ymm4[4,5,6,7]
2440 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2441 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
2442 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2443 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2444 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
2445 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
2446 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm13[1],xmm14[1]
2447 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2448 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
2449 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm12[1],xmm9[1]
2450 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2451 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
2452 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
2453 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
2454 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2455 ; AVX-NEXT: vmovaps %ymm3, 96(%rsi)
2456 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2457 ; AVX-NEXT: vmovaps %ymm3, 32(%rsi)
2458 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2459 ; AVX-NEXT: vmovaps %ymm3, 64(%rsi)
2460 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2461 ; AVX-NEXT: vmovaps %ymm3, (%rsi)
2462 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2463 ; AVX-NEXT: vmovaps %ymm3, 96(%rdx)
2464 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2465 ; AVX-NEXT: vmovaps %ymm3, 32(%rdx)
2466 ; AVX-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
2467 ; AVX-NEXT: vmovaps %ymm3, 64(%rdx)
2468 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2469 ; AVX-NEXT: vmovaps %ymm3, (%rdx)
2470 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2471 ; AVX-NEXT: vmovaps %ymm3, 96(%rcx)
2472 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2473 ; AVX-NEXT: vmovaps %ymm3, 32(%rcx)
2474 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2475 ; AVX-NEXT: vmovaps %ymm3, 64(%rcx)
2476 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2477 ; AVX-NEXT: vmovaps %ymm3, (%rcx)
2478 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2479 ; AVX-NEXT: vmovaps %ymm3, 32(%r8)
2480 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2481 ; AVX-NEXT: vmovaps %ymm3, 96(%r8)
2482 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2483 ; AVX-NEXT: vmovaps %ymm3, 64(%r8)
2484 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2485 ; AVX-NEXT: vmovaps %ymm3, (%r8)
2486 ; AVX-NEXT: vmovaps %ymm4, 96(%r9)
2487 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2488 ; AVX-NEXT: vmovaps %ymm3, 32(%r9)
2489 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2490 ; AVX-NEXT: vmovaps %ymm3, (%r9)
2491 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2492 ; AVX-NEXT: vmovaps %ymm3, 64(%r9)
2493 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
2494 ; AVX-NEXT: vmovaps %ymm2, 32(%rax)
2495 ; AVX-NEXT: vmovaps %ymm1, (%rax)
2496 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
2497 ; AVX-NEXT: vmovaps %ymm5, 96(%rax)
2498 ; AVX-NEXT: addq $552, %rsp # imm = 0x228
2499 ; AVX-NEXT: vzeroupper
2502 ; AVX2-LABEL: load_i64_stride6_vf16:
2504 ; AVX2-NEXT: subq $488, %rsp # imm = 0x1E8
2505 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm10
2506 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm12
2507 ; AVX2-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2508 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm7
2509 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm4
2510 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2511 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm0
2512 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm3
2513 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2514 ; AVX2-NEXT: vmovaps (%rdi), %xmm1
2515 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm5
2516 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2517 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
2518 ; AVX2-NEXT: vmovaps %ymm0, %ymm15
2519 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2520 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2521 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2522 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2523 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm3
2524 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm6
2525 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2526 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2527 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2528 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2529 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2530 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2531 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm11
2532 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm9
2533 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2534 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2535 ; AVX2-NEXT: vmovaps %ymm10, %ymm4
2536 ; AVX2-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2537 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2538 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2539 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2540 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm0
2541 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm8
2542 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2543 ; AVX2-NEXT: vmovaps %ymm0, %ymm10
2544 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2545 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm14
2546 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
2547 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2548 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2549 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2550 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2551 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm5
2552 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2553 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2554 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2555 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2556 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm3
2557 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2558 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2559 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2560 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2561 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm3
2562 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2563 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2564 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2565 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2566 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm1
2567 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2568 ; AVX2-NEXT: vmovaps %ymm10, %ymm14
2569 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2570 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2571 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
2572 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2573 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
2574 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm7
2575 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm3
2576 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
2577 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2578 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2579 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm0
2580 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
2581 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
2582 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm4
2583 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm5
2584 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
2585 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2586 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2587 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
2588 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2589 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
2590 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm12
2591 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm2
2592 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
2593 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2594 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2595 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
2596 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2597 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm13
2598 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm1
2599 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
2600 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2601 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2602 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
2603 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm15
2604 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
2605 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2606 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2607 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2608 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
2609 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm10
2610 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
2611 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2612 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2613 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2614 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
2615 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm4
2616 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
2617 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2618 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2619 ; AVX2-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
2620 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
2621 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm5
2622 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
2623 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
2624 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2625 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2626 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2627 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
2628 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
2629 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm7
2630 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm6
2631 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
2632 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2633 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2634 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2635 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
2636 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
2637 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm3
2638 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm2
2639 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
2640 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2641 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2642 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
2643 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2644 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm12
2645 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
2646 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
2647 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2648 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
2649 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2650 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm9
2651 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm0
2652 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
2653 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
2654 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm14
2655 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
2656 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
2657 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2658 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
2659 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
2660 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
2661 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2662 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
2663 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
2664 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
2665 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2666 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm2
2667 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2668 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
2669 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2670 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2671 ; AVX2-NEXT: vmovaps %ymm2, 96(%rsi)
2672 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2673 ; AVX2-NEXT: vmovaps %ymm2, 32(%rsi)
2674 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2675 ; AVX2-NEXT: vmovaps %ymm2, 64(%rsi)
2676 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2677 ; AVX2-NEXT: vmovaps %ymm2, (%rsi)
2678 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2679 ; AVX2-NEXT: vmovaps %ymm2, 96(%rdx)
2680 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2681 ; AVX2-NEXT: vmovaps %ymm2, 32(%rdx)
2682 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2683 ; AVX2-NEXT: vmovaps %ymm2, 64(%rdx)
2684 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2685 ; AVX2-NEXT: vmovaps %ymm2, (%rdx)
2686 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2687 ; AVX2-NEXT: vmovaps %ymm2, 96(%rcx)
2688 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2689 ; AVX2-NEXT: vmovaps %ymm2, 32(%rcx)
2690 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2691 ; AVX2-NEXT: vmovaps %ymm2, 64(%rcx)
2692 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2693 ; AVX2-NEXT: vmovaps %ymm2, (%rcx)
2694 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2695 ; AVX2-NEXT: vmovaps %ymm2, 32(%r8)
2696 ; AVX2-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
2697 ; AVX2-NEXT: vmovaps %ymm2, 96(%r8)
2698 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2699 ; AVX2-NEXT: vmovaps %ymm2, 64(%r8)
2700 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2701 ; AVX2-NEXT: vmovaps %ymm2, (%r8)
2702 ; AVX2-NEXT: vmovaps %ymm8, 96(%r9)
2703 ; AVX2-NEXT: vmovaps %ymm11, 32(%r9)
2704 ; AVX2-NEXT: vmovaps %ymm13, (%r9)
2705 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2706 ; AVX2-NEXT: vmovaps %ymm2, 64(%r9)
2707 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
2708 ; AVX2-NEXT: vmovaps %ymm1, 32(%rax)
2709 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
2710 ; AVX2-NEXT: vmovaps %ymm4, 64(%rax)
2711 ; AVX2-NEXT: vmovaps %ymm9, 96(%rax)
2712 ; AVX2-NEXT: addq $488, %rsp # imm = 0x1E8
2713 ; AVX2-NEXT: vzeroupper
2716 ; AVX2-FP-LABEL: load_i64_stride6_vf16:
2718 ; AVX2-FP-NEXT: subq $488, %rsp # imm = 0x1E8
2719 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm10
2720 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm12
2721 ; AVX2-FP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2722 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm7
2723 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm4
2724 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2725 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm0
2726 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm3
2727 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2728 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm1
2729 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm5
2730 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2731 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
2732 ; AVX2-FP-NEXT: vmovaps %ymm0, %ymm15
2733 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2734 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2735 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2736 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2737 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm3
2738 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm6
2739 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2740 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2741 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2742 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2743 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2744 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2745 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm11
2746 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm9
2747 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2748 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2749 ; AVX2-FP-NEXT: vmovaps %ymm10, %ymm4
2750 ; AVX2-FP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2751 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2752 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2753 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2754 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm0
2755 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm8
2756 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2757 ; AVX2-FP-NEXT: vmovaps %ymm0, %ymm10
2758 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2759 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm14
2760 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
2761 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2762 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2763 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2764 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2765 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm5
2766 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2767 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2768 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2769 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2770 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm3
2771 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2772 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2773 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2774 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2775 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm3
2776 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2777 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2778 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2779 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2780 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm1
2781 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2782 ; AVX2-FP-NEXT: vmovaps %ymm10, %ymm14
2783 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2784 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2785 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
2786 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2787 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
2788 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm7
2789 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm3
2790 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
2791 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2792 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2793 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm0
2794 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
2795 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
2796 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm4
2797 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm5
2798 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
2799 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2800 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2801 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
2802 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2803 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
2804 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm12
2805 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm2
2806 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
2807 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2808 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2809 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
2810 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2811 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm13
2812 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm1
2813 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
2814 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
2815 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2816 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
2817 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm15
2818 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
2819 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2820 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2821 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2822 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
2823 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm10
2824 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
2825 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2826 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
2827 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2828 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
2829 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm4
2830 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
2831 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
2832 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2833 ; AVX2-FP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
2834 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
2835 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm5
2836 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
2837 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
2838 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2839 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2840 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2841 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
2842 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
2843 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm7
2844 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm6
2845 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
2846 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
2847 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2848 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2849 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
2850 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
2851 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm3
2852 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm2
2853 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
2854 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2855 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2856 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
2857 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2858 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm12
2859 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
2860 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
2861 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2862 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
2863 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2864 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm9
2865 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm0
2866 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
2867 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
2868 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm14
2869 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
2870 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
2871 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
2872 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
2873 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
2874 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
2875 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
2876 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
2877 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
2878 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
2879 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2880 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm2
2881 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
2882 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
2883 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2884 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2885 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rsi)
2886 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2887 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rsi)
2888 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2889 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rsi)
2890 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2891 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rsi)
2892 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2893 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rdx)
2894 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2895 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rdx)
2896 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2897 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rdx)
2898 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2899 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rdx)
2900 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2901 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%rcx)
2902 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2903 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%rcx)
2904 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2905 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%rcx)
2906 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2907 ; AVX2-FP-NEXT: vmovaps %ymm2, (%rcx)
2908 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2909 ; AVX2-FP-NEXT: vmovaps %ymm2, 32(%r8)
2910 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
2911 ; AVX2-FP-NEXT: vmovaps %ymm2, 96(%r8)
2912 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2913 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%r8)
2914 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2915 ; AVX2-FP-NEXT: vmovaps %ymm2, (%r8)
2916 ; AVX2-FP-NEXT: vmovaps %ymm8, 96(%r9)
2917 ; AVX2-FP-NEXT: vmovaps %ymm11, 32(%r9)
2918 ; AVX2-FP-NEXT: vmovaps %ymm13, (%r9)
2919 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2920 ; AVX2-FP-NEXT: vmovaps %ymm2, 64(%r9)
2921 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
2922 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rax)
2923 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
2924 ; AVX2-FP-NEXT: vmovaps %ymm4, 64(%rax)
2925 ; AVX2-FP-NEXT: vmovaps %ymm9, 96(%rax)
2926 ; AVX2-FP-NEXT: addq $488, %rsp # imm = 0x1E8
2927 ; AVX2-FP-NEXT: vzeroupper
2928 ; AVX2-FP-NEXT: retq
2930 ; AVX2-FCP-LABEL: load_i64_stride6_vf16:
2931 ; AVX2-FCP: # %bb.0:
2932 ; AVX2-FCP-NEXT: subq $488, %rsp # imm = 0x1E8
2933 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm10
2934 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm12
2935 ; AVX2-FCP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2936 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm7
2937 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm4
2938 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2939 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm0
2940 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm3
2941 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2942 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm1
2943 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm5
2944 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm5[0]
2945 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
2946 ; AVX2-FCP-NEXT: vmovaps %ymm0, %ymm15
2947 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2948 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,0,3]
2949 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2950 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2951 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm3
2952 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm6
2953 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm3[0]
2954 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
2955 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2956 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,1,0,3]
2957 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm4[4,5,6,7]
2958 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2959 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm11
2960 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm9
2961 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm11[0]
2962 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2963 ; AVX2-FCP-NEXT: vmovaps %ymm10, %ymm4
2964 ; AVX2-FCP-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2965 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
2966 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm8[4,5,6,7]
2967 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2968 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm0
2969 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm8
2970 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
2971 ; AVX2-FCP-NEXT: vmovaps %ymm0, %ymm10
2972 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,0,3]
2973 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm14
2974 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
2975 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm12 = xmm0[0],xmm14[0]
2976 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm12[0,1,2,3],ymm13[4,5,6,7]
2977 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2978 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
2979 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm5
2980 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm15[1],ymm5[3],ymm15[3]
2981 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
2982 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2983 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm3[1]
2984 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm3
2985 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm7[1],ymm3[3],ymm7[3]
2986 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2987 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2988 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm11[1]
2989 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm3
2990 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
2991 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2992 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2993 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
2994 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm1
2995 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm10[1],ymm1[3],ymm10[3]
2996 ; AVX2-FCP-NEXT: vmovaps %ymm10, %ymm14
2997 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2998 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2999 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
3000 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3001 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
3002 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm7
3003 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm3
3004 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm7[0],xmm3[0]
3005 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
3006 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3007 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm0
3008 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
3009 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm0[0],ymm11[2],ymm0[2]
3010 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm4
3011 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm5
3012 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
3013 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3014 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3015 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
3016 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3017 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[2]
3018 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm12
3019 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm2
3020 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm12[0]
3021 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3022 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3023 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
3024 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[2],ymm0[2]
3025 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm13
3026 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm1
3027 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm6 = xmm1[0],xmm13[0]
3028 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
3029 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3030 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm3[1]
3031 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm15
3032 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm10[1],ymm15[1],ymm10[3],ymm15[3]
3033 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3034 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
3035 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3036 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
3037 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm10
3038 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
3039 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3040 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
3041 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3042 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm13[1]
3043 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm4
3044 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm4[1],ymm8[3],ymm4[3]
3045 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
3046 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm3[4,5,6,7]
3047 ; AVX2-FCP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
3048 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm12[1]
3049 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm5
3050 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm9[1],ymm5[1],ymm9[3],ymm5[3]
3051 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
3052 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3053 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3054 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3055 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
3056 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
3057 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm7
3058 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm6
3059 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm6[0],xmm7[0]
3060 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
3061 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3062 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3063 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
3064 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm1[0,1,0,3]
3065 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm3
3066 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm2
3067 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm3[0],xmm2[0]
3068 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm8[4,5,6,7]
3069 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3070 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
3071 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
3072 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm12
3073 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
3074 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm12[0]
3075 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm8[4,5,6,7]
3076 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm14[0],ymm4[0],ymm14[2],ymm4[2]
3077 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,0,3]
3078 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm9
3079 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm0
3080 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm9[0]
3081 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
3082 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm14
3083 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm14[1],ymm4[1],ymm14[3],ymm4[3]
3084 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm9[1]
3085 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm4[4,5,6,7]
3086 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
3087 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
3088 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm7[1]
3089 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm0[4,5,6,7]
3090 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
3091 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm15[1],ymm0[3],ymm15[3]
3092 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
3093 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
3094 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm2
3095 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
3096 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm12[1]
3097 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3098 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3099 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rsi)
3100 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3101 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rsi)
3102 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3103 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rsi)
3104 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3105 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rsi)
3106 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3107 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rdx)
3108 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3109 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rdx)
3110 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3111 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rdx)
3112 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3113 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rdx)
3114 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3115 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%rcx)
3116 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3117 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rcx)
3118 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3119 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%rcx)
3120 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3121 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rcx)
3122 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3123 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%r8)
3124 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
3125 ; AVX2-FCP-NEXT: vmovaps %ymm2, 96(%r8)
3126 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3127 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%r8)
3128 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3129 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
3130 ; AVX2-FCP-NEXT: vmovaps %ymm8, 96(%r9)
3131 ; AVX2-FCP-NEXT: vmovaps %ymm11, 32(%r9)
3132 ; AVX2-FCP-NEXT: vmovaps %ymm13, (%r9)
3133 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3134 ; AVX2-FCP-NEXT: vmovaps %ymm2, 64(%r9)
3135 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3136 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rax)
3137 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
3138 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%rax)
3139 ; AVX2-FCP-NEXT: vmovaps %ymm9, 96(%rax)
3140 ; AVX2-FCP-NEXT: addq $488, %rsp # imm = 0x1E8
3141 ; AVX2-FCP-NEXT: vzeroupper
3142 ; AVX2-FCP-NEXT: retq
3144 ; AVX512-LABEL: load_i64_stride6_vf16:
3146 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
3147 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm2
3148 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm1
3149 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm3
3150 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm5
3151 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm0
3152 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm4
3153 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm8
3154 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm12
3155 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm9
3156 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm11
3157 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm10
3158 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm13
3159 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3160 ; AVX512-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3161 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm15
3162 ; AVX512-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3163 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3164 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm7
3165 ; AVX512-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3166 ; AVX512-NEXT: movb $56, %dil
3167 ; AVX512-NEXT: kmovw %edi, %k1
3168 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3169 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3170 ; AVX512-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3171 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm15
3172 ; AVX512-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3173 ; AVX512-NEXT: movb $-64, %dil
3174 ; AVX512-NEXT: kmovw %edi, %k2
3175 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3176 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm15
3177 ; AVX512-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3178 ; AVX512-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3179 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3180 ; AVX512-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3181 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3182 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3183 ; AVX512-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3184 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm17
3185 ; AVX512-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3186 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3187 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm15
3188 ; AVX512-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3189 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3190 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3191 ; AVX512-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3192 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm19
3193 ; AVX512-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3194 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3195 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm19
3196 ; AVX512-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3197 ; AVX512-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3198 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3199 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3200 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3201 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3202 ; AVX512-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3203 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm21
3204 ; AVX512-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3205 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3206 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm19
3207 ; AVX512-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3208 ; AVX512-NEXT: movb $24, %dil
3209 ; AVX512-NEXT: kmovw %edi, %k2
3210 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3211 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3212 ; AVX512-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3213 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm22
3214 ; AVX512-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3215 ; AVX512-NEXT: movb $-32, %dil
3216 ; AVX512-NEXT: kmovw %edi, %k1
3217 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3218 ; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3219 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3220 ; AVX512-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3221 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3222 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3223 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3224 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3225 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm23
3226 ; AVX512-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3227 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3228 ; AVX512-NEXT: vmovdqa64 %zmm11, %zmm21
3229 ; AVX512-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3230 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3231 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3232 ; AVX512-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3233 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm24
3234 ; AVX512-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3235 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3236 ; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3237 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3238 ; AVX512-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3239 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3240 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3241 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3242 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3243 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm23
3244 ; AVX512-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3245 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3246 ; AVX512-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3247 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm25
3248 ; AVX512-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3249 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm26
3250 ; AVX512-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3251 ; AVX512-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3252 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3253 ; AVX512-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3254 ; AVX512-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3255 ; AVX512-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3256 ; AVX512-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3257 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3258 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3259 ; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3260 ; AVX512-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3261 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3262 ; AVX512-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3263 ; AVX512-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3264 ; AVX512-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3265 ; AVX512-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3266 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3267 ; AVX512-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3268 ; AVX512-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3269 ; AVX512-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3270 ; AVX512-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3271 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3272 ; AVX512-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3273 ; AVX512-NEXT: vmovdqa64 %zmm7, (%rsi)
3274 ; AVX512-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3275 ; AVX512-NEXT: vmovdqa64 %zmm15, (%rdx)
3276 ; AVX512-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3277 ; AVX512-NEXT: vmovdqa64 %zmm19, (%rcx)
3278 ; AVX512-NEXT: vmovdqa64 %zmm20, 64(%r8)
3279 ; AVX512-NEXT: vmovdqa64 %zmm21, (%r8)
3280 ; AVX512-NEXT: vmovdqa64 %zmm16, 64(%r9)
3281 ; AVX512-NEXT: vmovdqa64 %zmm25, (%r9)
3282 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%rax)
3283 ; AVX512-NEXT: vmovdqa64 %zmm9, (%rax)
3284 ; AVX512-NEXT: vzeroupper
3287 ; AVX512-FCP-LABEL: load_i64_stride6_vf16:
3288 ; AVX512-FCP: # %bb.0:
3289 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3290 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3291 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3292 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3293 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3294 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3295 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3296 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3297 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3298 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3299 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3300 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3301 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3302 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3303 ; AVX512-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3304 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3305 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3306 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3307 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3308 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3309 ; AVX512-FCP-NEXT: movb $56, %dil
3310 ; AVX512-FCP-NEXT: kmovw %edi, %k1
3311 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3312 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3313 ; AVX512-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3314 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3315 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3316 ; AVX512-FCP-NEXT: movb $-64, %dil
3317 ; AVX512-FCP-NEXT: kmovw %edi, %k2
3318 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3319 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3320 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3321 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3322 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3323 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3324 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3325 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3326 ; AVX512-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3327 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3328 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3329 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3330 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3331 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3332 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3333 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3334 ; AVX512-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3335 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3336 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3337 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3338 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3339 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3340 ; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3341 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3342 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3343 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3344 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3345 ; AVX512-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3346 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3347 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3348 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3349 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3350 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3351 ; AVX512-FCP-NEXT: movb $24, %dil
3352 ; AVX512-FCP-NEXT: kmovw %edi, %k2
3353 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3354 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3355 ; AVX512-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3356 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3357 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3358 ; AVX512-FCP-NEXT: movb $-32, %dil
3359 ; AVX512-FCP-NEXT: kmovw %edi, %k1
3360 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3361 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3362 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3363 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3364 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3365 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3366 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3367 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3368 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3369 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3370 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3371 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3372 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3373 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3374 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3375 ; AVX512-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3376 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3377 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3378 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3379 ; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3380 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3381 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3382 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3383 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3384 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3385 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3386 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3387 ; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3388 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3389 ; AVX512-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3390 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3391 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3392 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3393 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3394 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3395 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3396 ; AVX512-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3397 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3398 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3399 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3400 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3401 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3402 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3403 ; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3404 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3405 ; AVX512-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3406 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3407 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3408 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3410 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3411 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3412 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3413 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3414 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3415 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3416 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3417 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3418 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3419 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3420 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3421 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3422 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3423 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3424 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3425 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3426 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3427 ; AVX512-FCP-NEXT: vzeroupper
3428 ; AVX512-FCP-NEXT: retq
3430 ; AVX512DQ-LABEL: load_i64_stride6_vf16:
3431 ; AVX512DQ: # %bb.0:
3432 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
3433 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm2
3434 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm1
3435 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm3
3436 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm5
3437 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm0
3438 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm4
3439 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm8
3440 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm12
3441 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm9
3442 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm11
3443 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm10
3444 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm13
3445 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3446 ; AVX512DQ-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3447 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm15
3448 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3449 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3450 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm7
3451 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3452 ; AVX512DQ-NEXT: movb $56, %dil
3453 ; AVX512DQ-NEXT: kmovw %edi, %k1
3454 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3455 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3456 ; AVX512DQ-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3457 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm15
3458 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3459 ; AVX512DQ-NEXT: movb $-64, %dil
3460 ; AVX512DQ-NEXT: kmovw %edi, %k2
3461 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3462 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm15
3463 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3464 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3465 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3466 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3467 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3468 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3469 ; AVX512DQ-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3470 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm17
3471 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3472 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3473 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm15
3474 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3475 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3476 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3477 ; AVX512DQ-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3478 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm19
3479 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3480 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3481 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm19
3482 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3483 ; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3484 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3485 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3486 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3487 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3488 ; AVX512DQ-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3489 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm21
3490 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3491 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3492 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm19
3493 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3494 ; AVX512DQ-NEXT: movb $24, %dil
3495 ; AVX512DQ-NEXT: kmovw %edi, %k2
3496 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3497 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3498 ; AVX512DQ-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3499 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm22
3500 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3501 ; AVX512DQ-NEXT: movb $-32, %dil
3502 ; AVX512DQ-NEXT: kmovw %edi, %k1
3503 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3504 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3505 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3506 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3507 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3508 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3509 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3510 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3511 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm23
3512 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3513 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3514 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm21
3515 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3516 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3517 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3518 ; AVX512DQ-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3519 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm24
3520 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3521 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3522 ; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3523 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3524 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3525 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3526 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3527 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3528 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3529 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm23
3530 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3531 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3532 ; AVX512DQ-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3533 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm25
3534 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3535 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm26
3536 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3537 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3538 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3539 ; AVX512DQ-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3540 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3541 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3542 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3543 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3544 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3545 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3546 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3547 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3548 ; AVX512DQ-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3549 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3550 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3551 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3552 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3553 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3554 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3555 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3556 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3557 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3558 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3559 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, (%rsi)
3560 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3561 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, (%rdx)
3562 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3563 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, (%rcx)
3564 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 64(%r8)
3565 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, (%r8)
3566 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 64(%r9)
3567 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, (%r9)
3568 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rax)
3569 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%rax)
3570 ; AVX512DQ-NEXT: vzeroupper
3571 ; AVX512DQ-NEXT: retq
3573 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf16:
3574 ; AVX512DQ-FCP: # %bb.0:
3575 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3576 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3577 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3578 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3579 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3580 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3581 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3582 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3583 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3584 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3585 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3586 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3587 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3588 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3589 ; AVX512DQ-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3590 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3591 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3592 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3593 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3594 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3595 ; AVX512DQ-FCP-NEXT: movb $56, %dil
3596 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
3597 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3598 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3599 ; AVX512DQ-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3600 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3601 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3602 ; AVX512DQ-FCP-NEXT: movb $-64, %dil
3603 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
3604 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3605 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3606 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3607 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3608 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3609 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3610 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3611 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3612 ; AVX512DQ-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3613 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3614 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3615 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3616 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3617 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3618 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3619 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3620 ; AVX512DQ-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3621 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3622 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3623 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3624 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3625 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3626 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3627 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3628 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3629 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3630 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3631 ; AVX512DQ-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3632 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3633 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3634 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3635 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3636 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3637 ; AVX512DQ-FCP-NEXT: movb $24, %dil
3638 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k2
3639 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3640 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3641 ; AVX512DQ-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3642 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3643 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3644 ; AVX512DQ-FCP-NEXT: movb $-32, %dil
3645 ; AVX512DQ-FCP-NEXT: kmovw %edi, %k1
3646 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3647 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3648 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3649 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3650 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3651 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3652 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3653 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3654 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3655 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3656 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3657 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3658 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3659 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3660 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3661 ; AVX512DQ-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3662 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3663 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3664 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3665 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3666 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3667 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3668 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3669 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3670 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3671 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3672 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3673 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3674 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3675 ; AVX512DQ-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3676 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3677 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3678 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3679 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3680 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3681 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3682 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3683 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3684 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3685 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3686 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3687 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3688 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3689 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3690 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3691 ; AVX512DQ-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3692 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3693 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3694 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3695 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3696 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3697 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3698 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3699 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3700 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3701 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3702 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3703 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3704 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3705 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3706 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3707 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3708 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3709 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3710 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3711 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3712 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3713 ; AVX512DQ-FCP-NEXT: vzeroupper
3714 ; AVX512DQ-FCP-NEXT: retq
3716 ; AVX512BW-LABEL: load_i64_stride6_vf16:
3717 ; AVX512BW: # %bb.0:
3718 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3719 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm2
3720 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm1
3721 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm3
3722 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm5
3723 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm0
3724 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm4
3725 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm8
3726 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm12
3727 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm9
3728 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm11
3729 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm10
3730 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm13
3731 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3732 ; AVX512BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3733 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm15
3734 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3735 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3736 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm7
3737 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3738 ; AVX512BW-NEXT: movb $56, %dil
3739 ; AVX512BW-NEXT: kmovd %edi, %k1
3740 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3741 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3742 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3743 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm15
3744 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3745 ; AVX512BW-NEXT: movb $-64, %dil
3746 ; AVX512BW-NEXT: kmovd %edi, %k2
3747 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3748 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm15
3749 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3750 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3751 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3752 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3753 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3754 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3755 ; AVX512BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3756 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm17
3757 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3758 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3759 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm15
3760 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3761 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3762 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3763 ; AVX512BW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3764 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm19
3765 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3766 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3767 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm19
3768 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3769 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3770 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3771 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3772 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3773 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3774 ; AVX512BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3775 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm21
3776 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3777 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3778 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm19
3779 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3780 ; AVX512BW-NEXT: movb $24, %dil
3781 ; AVX512BW-NEXT: kmovd %edi, %k2
3782 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3783 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3784 ; AVX512BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3785 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm22
3786 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3787 ; AVX512BW-NEXT: movb $-32, %dil
3788 ; AVX512BW-NEXT: kmovd %edi, %k1
3789 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3790 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3791 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3792 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3793 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3794 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3795 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3796 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3797 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm23
3798 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3799 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3800 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm21
3801 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3802 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3803 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3804 ; AVX512BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3805 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm24
3806 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3807 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3808 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3809 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3810 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3811 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3812 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3813 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3814 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3815 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm23
3816 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3817 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3818 ; AVX512BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3819 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm25
3820 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3821 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm26
3822 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3823 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3824 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3825 ; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3826 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3827 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3828 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3829 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3830 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3831 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3832 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3833 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3834 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3835 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3836 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3837 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3838 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3839 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3840 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3841 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3842 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3843 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3844 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3845 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rsi)
3846 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3847 ; AVX512BW-NEXT: vmovdqa64 %zmm15, (%rdx)
3848 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3849 ; AVX512BW-NEXT: vmovdqa64 %zmm19, (%rcx)
3850 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 64(%r8)
3851 ; AVX512BW-NEXT: vmovdqa64 %zmm21, (%r8)
3852 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 64(%r9)
3853 ; AVX512BW-NEXT: vmovdqa64 %zmm25, (%r9)
3854 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rax)
3855 ; AVX512BW-NEXT: vmovdqa64 %zmm9, (%rax)
3856 ; AVX512BW-NEXT: vzeroupper
3857 ; AVX512BW-NEXT: retq
3859 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf16:
3860 ; AVX512BW-FCP: # %bb.0:
3861 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3862 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
3863 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
3864 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
3865 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
3866 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
3867 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
3868 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
3869 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
3870 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
3871 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
3872 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
3873 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
3874 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
3875 ; AVX512BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
3876 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
3877 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
3878 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
3879 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
3880 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
3881 ; AVX512BW-FCP-NEXT: movb $56, %dil
3882 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
3883 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
3884 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
3885 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3886 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
3887 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
3888 ; AVX512BW-FCP-NEXT: movb $-64, %dil
3889 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
3890 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
3891 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
3892 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
3893 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
3894 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
3895 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
3896 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
3897 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
3898 ; AVX512BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3899 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
3900 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
3901 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
3902 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
3903 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
3904 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
3905 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
3906 ; AVX512BW-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3907 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
3908 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
3909 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
3910 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
3911 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
3912 ; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
3913 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
3914 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
3915 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
3916 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
3917 ; AVX512BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3918 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
3919 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
3920 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
3921 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
3922 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
3923 ; AVX512BW-FCP-NEXT: movb $24, %dil
3924 ; AVX512BW-FCP-NEXT: kmovd %edi, %k2
3925 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
3926 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
3927 ; AVX512BW-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
3928 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
3929 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
3930 ; AVX512BW-FCP-NEXT: movb $-32, %dil
3931 ; AVX512BW-FCP-NEXT: kmovd %edi, %k1
3932 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
3933 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
3934 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
3935 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
3936 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3937 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
3938 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
3939 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3940 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
3941 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
3942 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
3943 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
3944 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
3945 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
3946 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
3947 ; AVX512BW-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3948 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
3949 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
3950 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
3951 ; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
3952 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
3953 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
3954 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
3955 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
3956 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
3957 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3958 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
3959 ; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
3960 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
3961 ; AVX512BW-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3962 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
3963 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
3964 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
3965 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
3966 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
3967 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
3968 ; AVX512BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
3969 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
3970 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
3971 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
3972 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
3973 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
3974 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
3975 ; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
3976 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
3977 ; AVX512BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3978 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
3979 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
3980 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
3981 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
3982 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
3983 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
3984 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
3985 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
3986 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
3987 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
3988 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
3989 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
3990 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
3991 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
3992 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
3993 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
3994 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
3995 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
3996 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
3997 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
3998 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
3999 ; AVX512BW-FCP-NEXT: vzeroupper
4000 ; AVX512BW-FCP-NEXT: retq
4002 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf16:
4003 ; AVX512DQ-BW: # %bb.0:
4004 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4005 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm2
4006 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm1
4007 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm3
4008 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm5
4009 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm0
4010 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm4
4011 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm8
4012 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm12
4013 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm9
4014 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm11
4015 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm10
4016 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm13
4017 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
4018 ; AVX512DQ-BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
4019 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm15
4020 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
4021 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
4022 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm7
4023 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
4024 ; AVX512DQ-BW-NEXT: movb $56, %dil
4025 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
4026 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
4027 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
4028 ; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4029 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm15
4030 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
4031 ; AVX512DQ-BW-NEXT: movb $-64, %dil
4032 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
4033 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
4034 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm15
4035 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
4036 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
4037 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
4038 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
4039 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
4040 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
4041 ; AVX512DQ-BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
4042 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm17
4043 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
4044 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
4045 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm15
4046 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
4047 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
4048 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
4049 ; AVX512DQ-BW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4050 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm19
4051 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
4052 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
4053 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm19
4054 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
4055 ; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
4056 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
4057 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
4058 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
4059 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
4060 ; AVX512DQ-BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4061 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm21
4062 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
4063 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
4064 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm19
4065 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
4066 ; AVX512DQ-BW-NEXT: movb $24, %dil
4067 ; AVX512DQ-BW-NEXT: kmovd %edi, %k2
4068 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
4069 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
4070 ; AVX512DQ-BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
4071 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm22
4072 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
4073 ; AVX512DQ-BW-NEXT: movb $-32, %dil
4074 ; AVX512DQ-BW-NEXT: kmovd %edi, %k1
4075 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
4076 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
4077 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
4078 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
4079 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
4080 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
4081 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
4082 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4083 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm23
4084 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
4085 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
4086 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm21
4087 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
4088 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
4089 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
4090 ; AVX512DQ-BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
4091 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm24
4092 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
4093 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
4094 ; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
4095 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
4096 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
4097 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
4098 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
4099 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
4100 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4101 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm23
4102 ; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
4103 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
4104 ; AVX512DQ-BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
4105 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm25
4106 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
4107 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm26
4108 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
4109 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
4110 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4111 ; AVX512DQ-BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
4112 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
4113 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
4114 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
4115 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
4116 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
4117 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4118 ; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
4119 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
4120 ; AVX512DQ-BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
4121 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
4122 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
4123 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
4124 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
4125 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
4126 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
4127 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
4128 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
4129 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
4130 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
4131 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, (%rsi)
4132 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 64(%rdx)
4133 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, (%rdx)
4134 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%rcx)
4135 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, (%rcx)
4136 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 64(%r8)
4137 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, (%r8)
4138 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 64(%r9)
4139 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, (%r9)
4140 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%rax)
4141 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, (%rax)
4142 ; AVX512DQ-BW-NEXT: vzeroupper
4143 ; AVX512DQ-BW-NEXT: retq
4145 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf16:
4146 ; AVX512DQ-BW-FCP: # %bb.0:
4147 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
4148 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm2
4149 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm1
4150 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm3
4151 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm5
4152 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm0
4153 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm4
4154 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm8
4155 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm12
4156 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
4157 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
4158 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm10
4159 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm13
4160 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [0,6,0,10,0,6,0,10]
4161 ; AVX512DQ-BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
4162 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm15
4163 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm14, %zmm15
4164 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm6 = [0,6,12,0]
4165 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm7
4166 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm7
4167 ; AVX512DQ-BW-FCP-NEXT: movb $56, %dil
4168 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
4169 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k1}
4170 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [4,10,4,10,4,10,4,10]
4171 ; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4172 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm15
4173 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm16, %zmm15
4174 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %dil
4175 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
4176 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7 {%k2}
4177 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
4178 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm16, %zmm15
4179 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm14
4180 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm6
4181 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm6 {%k1}
4182 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6 {%k2}
4183 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,7,0,11,1,7,0,11]
4184 ; AVX512DQ-BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
4185 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm17
4186 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm18, %zmm17
4187 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm14 = [1,7,13,0]
4188 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm15
4189 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm14, %zmm15
4190 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15 {%k1}
4191 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [5,11,5,11,5,11,5,11]
4192 ; AVX512DQ-BW-FCP-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4193 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm19
4194 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm17, %zmm19
4195 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm15 {%k2}
4196 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm19
4197 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm17, %zmm19
4198 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm5, %zmm18
4199 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm14
4200 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm14 {%k1}
4201 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm14 {%k2}
4202 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [10,4,10,4,10,4,10,4]
4203 ; AVX512DQ-BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4204 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm21
4205 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm20, %zmm21
4206 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm18 = [10,0,6,0]
4207 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
4208 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm18, %zmm19
4209 ; AVX512DQ-BW-FCP-NEXT: movb $24, %dil
4210 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k2
4211 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm19 {%k2}
4212 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,6,12,0,0,6,12]
4213 ; AVX512DQ-BW-FCP-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
4214 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm22
4215 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm22
4216 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %dil
4217 ; AVX512DQ-BW-FCP-NEXT: kmovd %edi, %k1
4218 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm19 {%k1}
4219 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm21
4220 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm20
4221 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm18
4222 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
4223 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
4224 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [11,5,11,5,11,5,11,5]
4225 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4226 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm23
4227 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm22, %zmm23
4228 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm20 = [11,1,7,0]
4229 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm21
4230 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm20, %zmm21
4231 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm21 {%k2}
4232 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [0,1,7,13,0,1,7,13]
4233 ; AVX512DQ-BW-FCP-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
4234 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm24
4235 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm23, %zmm24
4236 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm21 {%k1}
4237 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm23
4238 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm22
4239 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm20
4240 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20 {%k2}
4241 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm20 {%k1}
4242 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,10,0,6,0,10,0,6]
4243 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4244 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm23
4245 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm23
4246 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [12,0,0,6,12,0,0,6]
4247 ; AVX512DQ-BW-FCP-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
4248 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm25
4249 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm24, %zmm25
4250 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm26
4251 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm16, %zmm26
4252 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm26, %zmm25, %zmm25
4253 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4254 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm5, %zmm3, %zmm24
4255 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm1, %zmm16
4256 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm16, %zmm24, %zmm16
4257 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm22
4258 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k1}
4259 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [0,11,1,7,0,11,1,7]
4260 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
4261 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm22, %zmm8
4262 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [13,0,1,7,13,0,1,7]
4263 ; AVX512DQ-BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
4264 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm10
4265 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm17, %zmm9
4266 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm9, %zmm10, %zmm9
4267 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
4268 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm12, %zmm3
4269 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm17, %zmm1
4270 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm1, %zmm3, %zmm1
4271 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm22, %zmm0
4272 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
4273 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
4274 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, (%rsi)
4275 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rdx)
4276 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, (%rdx)
4277 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rcx)
4278 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, (%rcx)
4279 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%r8)
4280 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
4281 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 64(%r9)
4282 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, (%r9)
4283 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
4284 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, (%rax)
4285 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
4286 ; AVX512DQ-BW-FCP-NEXT: retq
4287 %wide.vec = load <96 x i64>, ptr %in.vec, align 64
4288 %strided.vec0 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90>
4289 %strided.vec1 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91>
4290 %strided.vec2 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92>
4291 %strided.vec3 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93>
4292 %strided.vec4 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94>
4293 %strided.vec5 = shufflevector <96 x i64> %wide.vec, <96 x i64> poison, <16 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95>
4294 store <16 x i64> %strided.vec0, ptr %out.vec0, align 64
4295 store <16 x i64> %strided.vec1, ptr %out.vec1, align 64
4296 store <16 x i64> %strided.vec2, ptr %out.vec2, align 64
4297 store <16 x i64> %strided.vec3, ptr %out.vec3, align 64
4298 store <16 x i64> %strided.vec4, ptr %out.vec4, align 64
4299 store <16 x i64> %strided.vec5, ptr %out.vec5, align 64
4303 define void @load_i64_stride6_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
4304 ; SSE-LABEL: load_i64_stride6_vf32:
4306 ; SSE-NEXT: subq $1176, %rsp # imm = 0x498
4307 ; SSE-NEXT: movaps 624(%rdi), %xmm0
4308 ; SSE-NEXT: movaps 576(%rdi), %xmm9
4309 ; SSE-NEXT: movaps 240(%rdi), %xmm3
4310 ; SSE-NEXT: movaps 192(%rdi), %xmm8
4311 ; SSE-NEXT: movaps 720(%rdi), %xmm1
4312 ; SSE-NEXT: movaps 672(%rdi), %xmm11
4313 ; SSE-NEXT: movaps 336(%rdi), %xmm5
4314 ; SSE-NEXT: movaps 288(%rdi), %xmm10
4315 ; SSE-NEXT: movaps 432(%rdi), %xmm4
4316 ; SSE-NEXT: movaps 384(%rdi), %xmm12
4317 ; SSE-NEXT: movaps 912(%rdi), %xmm2
4318 ; SSE-NEXT: movaps 528(%rdi), %xmm6
4319 ; SSE-NEXT: movaps 480(%rdi), %xmm14
4320 ; SSE-NEXT: movaps 144(%rdi), %xmm7
4321 ; SSE-NEXT: movaps 96(%rdi), %xmm13
4322 ; SSE-NEXT: movaps %xmm13, %xmm15
4323 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm7[0]
4324 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4325 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm7[1]
4326 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4327 ; SSE-NEXT: movaps %xmm10, %xmm7
4328 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm5[0]
4329 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4330 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm5[1]
4331 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4332 ; SSE-NEXT: movaps %xmm8, %xmm5
4333 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm3[0]
4334 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4335 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm3[1]
4336 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4337 ; SSE-NEXT: movaps %xmm14, %xmm3
4338 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
4339 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4340 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm6[1]
4341 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4342 ; SSE-NEXT: movaps %xmm12, %xmm3
4343 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm4[0]
4344 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4345 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm4[1]
4346 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4347 ; SSE-NEXT: movaps %xmm11, %xmm3
4348 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm1[0]
4349 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4350 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm1[1]
4351 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4352 ; SSE-NEXT: movaps %xmm9, %xmm1
4353 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4354 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4355 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
4356 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4357 ; SSE-NEXT: movaps 864(%rdi), %xmm0
4358 ; SSE-NEXT: movaps %xmm0, %xmm1
4359 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4360 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4361 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
4362 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4363 ; SSE-NEXT: movaps 816(%rdi), %xmm0
4364 ; SSE-NEXT: movaps 768(%rdi), %xmm1
4365 ; SSE-NEXT: movaps %xmm1, %xmm2
4366 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4367 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4368 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4369 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4370 ; SSE-NEXT: movaps 1104(%rdi), %xmm0
4371 ; SSE-NEXT: movaps 1056(%rdi), %xmm1
4372 ; SSE-NEXT: movaps %xmm1, %xmm2
4373 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4374 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4375 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4376 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4377 ; SSE-NEXT: movaps 1008(%rdi), %xmm0
4378 ; SSE-NEXT: movaps 960(%rdi), %xmm1
4379 ; SSE-NEXT: movaps %xmm1, %xmm2
4380 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4381 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4382 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4383 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4384 ; SSE-NEXT: movaps 1296(%rdi), %xmm0
4385 ; SSE-NEXT: movaps 1248(%rdi), %xmm1
4386 ; SSE-NEXT: movaps %xmm1, %xmm2
4387 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4388 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4389 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4390 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4391 ; SSE-NEXT: movaps 1200(%rdi), %xmm0
4392 ; SSE-NEXT: movaps 1152(%rdi), %xmm1
4393 ; SSE-NEXT: movaps %xmm1, %xmm2
4394 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4395 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4396 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4397 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4398 ; SSE-NEXT: movaps 1488(%rdi), %xmm0
4399 ; SSE-NEXT: movaps 1440(%rdi), %xmm1
4400 ; SSE-NEXT: movaps %xmm1, %xmm2
4401 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4402 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4403 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4404 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4405 ; SSE-NEXT: movaps 1392(%rdi), %xmm0
4406 ; SSE-NEXT: movaps 1344(%rdi), %xmm1
4407 ; SSE-NEXT: movaps %xmm1, %xmm2
4408 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4409 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4410 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4411 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4412 ; SSE-NEXT: movaps (%rdi), %xmm1
4413 ; SSE-NEXT: movaps 48(%rdi), %xmm0
4414 ; SSE-NEXT: movaps %xmm1, %xmm2
4415 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4416 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4417 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4418 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4419 ; SSE-NEXT: movaps 64(%rdi), %xmm0
4420 ; SSE-NEXT: movaps 16(%rdi), %xmm1
4421 ; SSE-NEXT: movaps %xmm1, %xmm2
4422 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4423 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4424 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4425 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4426 ; SSE-NEXT: movaps 160(%rdi), %xmm0
4427 ; SSE-NEXT: movaps 112(%rdi), %xmm1
4428 ; SSE-NEXT: movaps %xmm1, %xmm2
4429 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4430 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4431 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4432 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4433 ; SSE-NEXT: movaps 256(%rdi), %xmm0
4434 ; SSE-NEXT: movaps 208(%rdi), %xmm1
4435 ; SSE-NEXT: movaps %xmm1, %xmm2
4436 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4437 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4438 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4439 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4440 ; SSE-NEXT: movaps 352(%rdi), %xmm0
4441 ; SSE-NEXT: movaps 304(%rdi), %xmm1
4442 ; SSE-NEXT: movaps %xmm1, %xmm2
4443 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4444 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4445 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4446 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4447 ; SSE-NEXT: movaps 448(%rdi), %xmm0
4448 ; SSE-NEXT: movaps 400(%rdi), %xmm1
4449 ; SSE-NEXT: movaps %xmm1, %xmm2
4450 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4451 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4452 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4453 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4454 ; SSE-NEXT: movaps 544(%rdi), %xmm0
4455 ; SSE-NEXT: movaps 496(%rdi), %xmm1
4456 ; SSE-NEXT: movaps %xmm1, %xmm2
4457 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4458 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4459 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4460 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4461 ; SSE-NEXT: movaps 640(%rdi), %xmm0
4462 ; SSE-NEXT: movaps 592(%rdi), %xmm1
4463 ; SSE-NEXT: movaps %xmm1, %xmm2
4464 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4465 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4466 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4467 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4468 ; SSE-NEXT: movaps 736(%rdi), %xmm0
4469 ; SSE-NEXT: movaps 688(%rdi), %xmm1
4470 ; SSE-NEXT: movaps %xmm1, %xmm2
4471 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4472 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4473 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4474 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4475 ; SSE-NEXT: movaps 832(%rdi), %xmm0
4476 ; SSE-NEXT: movaps 784(%rdi), %xmm1
4477 ; SSE-NEXT: movaps %xmm1, %xmm2
4478 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4479 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4480 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4481 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4482 ; SSE-NEXT: movaps 928(%rdi), %xmm0
4483 ; SSE-NEXT: movaps 880(%rdi), %xmm1
4484 ; SSE-NEXT: movaps %xmm1, %xmm2
4485 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4486 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4487 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4488 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4489 ; SSE-NEXT: movaps 1024(%rdi), %xmm0
4490 ; SSE-NEXT: movaps 976(%rdi), %xmm1
4491 ; SSE-NEXT: movaps %xmm1, %xmm2
4492 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4493 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4494 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4495 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4496 ; SSE-NEXT: movaps 1120(%rdi), %xmm0
4497 ; SSE-NEXT: movaps 1072(%rdi), %xmm1
4498 ; SSE-NEXT: movaps %xmm1, %xmm2
4499 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4500 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4501 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4502 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4503 ; SSE-NEXT: movaps 1216(%rdi), %xmm0
4504 ; SSE-NEXT: movaps 1168(%rdi), %xmm1
4505 ; SSE-NEXT: movaps %xmm1, %xmm2
4506 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4507 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4508 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4509 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4510 ; SSE-NEXT: movaps 1312(%rdi), %xmm0
4511 ; SSE-NEXT: movaps 1264(%rdi), %xmm1
4512 ; SSE-NEXT: movaps %xmm1, %xmm2
4513 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4514 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4515 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4516 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4517 ; SSE-NEXT: movaps 1408(%rdi), %xmm0
4518 ; SSE-NEXT: movaps 1360(%rdi), %xmm1
4519 ; SSE-NEXT: movaps %xmm1, %xmm2
4520 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4521 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4522 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4523 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4524 ; SSE-NEXT: movaps 1504(%rdi), %xmm0
4525 ; SSE-NEXT: movaps 1456(%rdi), %xmm1
4526 ; SSE-NEXT: movaps %xmm1, %xmm2
4527 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4528 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4529 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4530 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4531 ; SSE-NEXT: movaps 80(%rdi), %xmm0
4532 ; SSE-NEXT: movaps 32(%rdi), %xmm1
4533 ; SSE-NEXT: movaps %xmm1, %xmm2
4534 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4535 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4536 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4537 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4538 ; SSE-NEXT: movaps 176(%rdi), %xmm0
4539 ; SSE-NEXT: movaps 128(%rdi), %xmm1
4540 ; SSE-NEXT: movaps %xmm1, %xmm2
4541 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4542 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4543 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4544 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4545 ; SSE-NEXT: movaps 272(%rdi), %xmm0
4546 ; SSE-NEXT: movaps 224(%rdi), %xmm1
4547 ; SSE-NEXT: movaps %xmm1, %xmm2
4548 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4549 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4550 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4551 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4552 ; SSE-NEXT: movaps 368(%rdi), %xmm0
4553 ; SSE-NEXT: movaps 320(%rdi), %xmm1
4554 ; SSE-NEXT: movaps %xmm1, %xmm2
4555 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4556 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4557 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4558 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4559 ; SSE-NEXT: movaps 464(%rdi), %xmm0
4560 ; SSE-NEXT: movaps 416(%rdi), %xmm1
4561 ; SSE-NEXT: movaps %xmm1, %xmm2
4562 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4563 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4564 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4565 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4566 ; SSE-NEXT: movaps 560(%rdi), %xmm0
4567 ; SSE-NEXT: movaps 512(%rdi), %xmm1
4568 ; SSE-NEXT: movaps %xmm1, %xmm2
4569 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4570 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4571 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4572 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
4573 ; SSE-NEXT: movaps 656(%rdi), %xmm0
4574 ; SSE-NEXT: movaps 608(%rdi), %xmm1
4575 ; SSE-NEXT: movaps %xmm1, %xmm2
4576 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4577 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4578 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4579 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4580 ; SSE-NEXT: movaps 752(%rdi), %xmm0
4581 ; SSE-NEXT: movaps 704(%rdi), %xmm14
4582 ; SSE-NEXT: movaps %xmm14, %xmm1
4583 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4584 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4585 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
4586 ; SSE-NEXT: movaps 848(%rdi), %xmm0
4587 ; SSE-NEXT: movaps 800(%rdi), %xmm15
4588 ; SSE-NEXT: movaps %xmm15, %xmm1
4589 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4590 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4591 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
4592 ; SSE-NEXT: movaps 944(%rdi), %xmm0
4593 ; SSE-NEXT: movaps 896(%rdi), %xmm13
4594 ; SSE-NEXT: movaps %xmm13, %xmm1
4595 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4596 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4597 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
4598 ; SSE-NEXT: movaps 1040(%rdi), %xmm0
4599 ; SSE-NEXT: movaps 992(%rdi), %xmm10
4600 ; SSE-NEXT: movaps %xmm10, %xmm12
4601 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm0[0]
4602 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
4603 ; SSE-NEXT: movaps 1136(%rdi), %xmm0
4604 ; SSE-NEXT: movaps 1088(%rdi), %xmm9
4605 ; SSE-NEXT: movaps %xmm9, %xmm11
4606 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
4607 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
4608 ; SSE-NEXT: movaps 1232(%rdi), %xmm0
4609 ; SSE-NEXT: movaps 1184(%rdi), %xmm7
4610 ; SSE-NEXT: movaps %xmm7, %xmm8
4611 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
4612 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
4613 ; SSE-NEXT: movaps 1328(%rdi), %xmm0
4614 ; SSE-NEXT: movaps 1280(%rdi), %xmm5
4615 ; SSE-NEXT: movaps %xmm5, %xmm6
4616 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
4617 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
4618 ; SSE-NEXT: movaps 1424(%rdi), %xmm0
4619 ; SSE-NEXT: movaps 1376(%rdi), %xmm3
4620 ; SSE-NEXT: movaps %xmm3, %xmm4
4621 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
4622 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
4623 ; SSE-NEXT: movaps 1520(%rdi), %xmm0
4624 ; SSE-NEXT: movaps 1472(%rdi), %xmm1
4625 ; SSE-NEXT: movaps %xmm1, %xmm2
4626 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4627 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
4628 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4629 ; SSE-NEXT: movaps %xmm0, 224(%rsi)
4630 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4631 ; SSE-NEXT: movaps %xmm0, 160(%rsi)
4632 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4633 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
4634 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4635 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
4636 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4637 ; SSE-NEXT: movaps %xmm0, 240(%rsi)
4638 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4639 ; SSE-NEXT: movaps %xmm0, 176(%rsi)
4640 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4641 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
4642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4643 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
4644 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4645 ; SSE-NEXT: movaps %xmm0, 192(%rsi)
4646 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4647 ; SSE-NEXT: movaps %xmm0, 128(%rsi)
4648 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4649 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
4650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4651 ; SSE-NEXT: movaps %xmm0, (%rsi)
4652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4653 ; SSE-NEXT: movaps %xmm0, 208(%rsi)
4654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4655 ; SSE-NEXT: movaps %xmm0, 144(%rsi)
4656 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4657 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
4658 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4659 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
4660 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4661 ; SSE-NEXT: movaps %xmm0, 224(%rdx)
4662 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4663 ; SSE-NEXT: movaps %xmm0, 240(%rdx)
4664 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4665 ; SSE-NEXT: movaps %xmm0, 192(%rdx)
4666 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4667 ; SSE-NEXT: movaps %xmm0, 208(%rdx)
4668 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4669 ; SSE-NEXT: movaps %xmm0, 160(%rdx)
4670 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4671 ; SSE-NEXT: movaps %xmm0, 176(%rdx)
4672 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4673 ; SSE-NEXT: movaps %xmm0, 128(%rdx)
4674 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4675 ; SSE-NEXT: movaps %xmm0, 144(%rdx)
4676 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4677 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
4678 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4679 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
4680 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4681 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
4682 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4683 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
4684 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4685 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
4686 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4687 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
4688 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4689 ; SSE-NEXT: movaps %xmm0, (%rdx)
4690 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4691 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
4692 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4693 ; SSE-NEXT: movaps %xmm0, 240(%rcx)
4694 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4695 ; SSE-NEXT: movaps %xmm0, 224(%rcx)
4696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4697 ; SSE-NEXT: movaps %xmm0, 208(%rcx)
4698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4699 ; SSE-NEXT: movaps %xmm0, 192(%rcx)
4700 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4701 ; SSE-NEXT: movaps %xmm0, 176(%rcx)
4702 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4703 ; SSE-NEXT: movaps %xmm0, 160(%rcx)
4704 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4705 ; SSE-NEXT: movaps %xmm0, 144(%rcx)
4706 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4707 ; SSE-NEXT: movaps %xmm0, 128(%rcx)
4708 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4709 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
4710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4711 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
4712 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4713 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
4714 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4715 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
4716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4717 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
4718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4719 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
4720 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4721 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
4722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4723 ; SSE-NEXT: movaps %xmm0, (%rcx)
4724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4725 ; SSE-NEXT: movaps %xmm0, 240(%r8)
4726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4727 ; SSE-NEXT: movaps %xmm0, 224(%r8)
4728 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4729 ; SSE-NEXT: movaps %xmm0, 208(%r8)
4730 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4731 ; SSE-NEXT: movaps %xmm0, 192(%r8)
4732 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4733 ; SSE-NEXT: movaps %xmm0, 176(%r8)
4734 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4735 ; SSE-NEXT: movaps %xmm0, 160(%r8)
4736 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4737 ; SSE-NEXT: movaps %xmm0, 144(%r8)
4738 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4739 ; SSE-NEXT: movaps %xmm0, 128(%r8)
4740 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4741 ; SSE-NEXT: movaps %xmm0, 112(%r8)
4742 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4743 ; SSE-NEXT: movaps %xmm0, 96(%r8)
4744 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4745 ; SSE-NEXT: movaps %xmm0, 80(%r8)
4746 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4747 ; SSE-NEXT: movaps %xmm0, 64(%r8)
4748 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4749 ; SSE-NEXT: movaps %xmm0, 48(%r8)
4750 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4751 ; SSE-NEXT: movaps %xmm0, 32(%r8)
4752 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4753 ; SSE-NEXT: movaps %xmm0, 16(%r8)
4754 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4755 ; SSE-NEXT: movaps %xmm0, (%r8)
4756 ; SSE-NEXT: movaps %xmm2, 240(%r9)
4757 ; SSE-NEXT: movaps %xmm4, 224(%r9)
4758 ; SSE-NEXT: movaps %xmm6, 208(%r9)
4759 ; SSE-NEXT: movaps %xmm8, 192(%r9)
4760 ; SSE-NEXT: movaps %xmm11, 176(%r9)
4761 ; SSE-NEXT: movaps %xmm12, 160(%r9)
4762 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4763 ; SSE-NEXT: movaps %xmm0, 144(%r9)
4764 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4765 ; SSE-NEXT: movaps %xmm0, 128(%r9)
4766 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4767 ; SSE-NEXT: movaps %xmm0, 112(%r9)
4768 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4769 ; SSE-NEXT: movaps %xmm0, 96(%r9)
4770 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4771 ; SSE-NEXT: movaps %xmm0, 80(%r9)
4772 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4773 ; SSE-NEXT: movaps %xmm0, 64(%r9)
4774 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4775 ; SSE-NEXT: movaps %xmm0, 48(%r9)
4776 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4777 ; SSE-NEXT: movaps %xmm0, 32(%r9)
4778 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4779 ; SSE-NEXT: movaps %xmm0, 16(%r9)
4780 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4781 ; SSE-NEXT: movaps %xmm0, (%r9)
4782 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4783 ; SSE-NEXT: movaps %xmm1, 240(%rax)
4784 ; SSE-NEXT: movaps %xmm3, 224(%rax)
4785 ; SSE-NEXT: movaps %xmm5, 208(%rax)
4786 ; SSE-NEXT: movaps %xmm7, 192(%rax)
4787 ; SSE-NEXT: movaps %xmm9, 176(%rax)
4788 ; SSE-NEXT: movaps %xmm10, 160(%rax)
4789 ; SSE-NEXT: movaps %xmm13, 144(%rax)
4790 ; SSE-NEXT: movaps %xmm15, 128(%rax)
4791 ; SSE-NEXT: movaps %xmm14, 112(%rax)
4792 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4793 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4794 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4795 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4796 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4797 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4798 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4799 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4800 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4801 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4802 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4803 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4804 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4805 ; SSE-NEXT: movaps %xmm0, (%rax)
4806 ; SSE-NEXT: addq $1176, %rsp # imm = 0x498
4809 ; AVX-LABEL: load_i64_stride6_vf32:
4811 ; AVX-NEXT: subq $1624, %rsp # imm = 0x658
4812 ; AVX-NEXT: vmovaps 1088(%rdi), %ymm2
4813 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4814 ; AVX-NEXT: vmovaps 704(%rdi), %ymm3
4815 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4816 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
4817 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4818 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm0
4819 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4820 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4821 ; AVX-NEXT: vmovaps 240(%rdi), %xmm4
4822 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4823 ; AVX-NEXT: vmovaps 192(%rdi), %xmm1
4824 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4825 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
4826 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4827 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4828 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm0
4829 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4830 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
4831 ; AVX-NEXT: vmovaps 624(%rdi), %xmm3
4832 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4833 ; AVX-NEXT: vmovaps 576(%rdi), %xmm1
4834 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4835 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
4836 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4837 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4838 ; AVX-NEXT: vinsertf128 $1, 1056(%rdi), %ymm0, %ymm0
4839 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4840 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
4841 ; AVX-NEXT: vmovaps 1008(%rdi), %xmm2
4842 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4843 ; AVX-NEXT: vmovaps 960(%rdi), %xmm1
4844 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4845 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4846 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4847 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4848 ; AVX-NEXT: vmovaps 1472(%rdi), %ymm1
4849 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4850 ; AVX-NEXT: vinsertf128 $1, 1440(%rdi), %ymm0, %ymm0
4851 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4852 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4853 ; AVX-NEXT: vmovaps 1392(%rdi), %xmm2
4854 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4855 ; AVX-NEXT: vmovaps 1344(%rdi), %xmm1
4856 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4857 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4858 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4859 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4860 ; AVX-NEXT: vmovaps 128(%rdi), %ymm1
4861 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4862 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm0
4863 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
4864 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4865 ; AVX-NEXT: vmovaps (%rdi), %xmm14
4866 ; AVX-NEXT: vmovaps 48(%rdi), %xmm13
4867 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm14[0],xmm13[0]
4868 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4869 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4870 ; AVX-NEXT: vmovaps 512(%rdi), %ymm12
4871 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm11
4872 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
4873 ; AVX-NEXT: vmovaps 432(%rdi), %xmm10
4874 ; AVX-NEXT: vmovaps 384(%rdi), %xmm9
4875 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
4876 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4877 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4878 ; AVX-NEXT: vmovaps 896(%rdi), %ymm8
4879 ; AVX-NEXT: vinsertf128 $1, 864(%rdi), %ymm0, %ymm7
4880 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
4881 ; AVX-NEXT: vmovaps 816(%rdi), %xmm6
4882 ; AVX-NEXT: vmovaps 768(%rdi), %xmm5
4883 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
4884 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4885 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4886 ; AVX-NEXT: vmovaps 1280(%rdi), %ymm4
4887 ; AVX-NEXT: vinsertf128 $1, 1248(%rdi), %ymm0, %ymm3
4888 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
4889 ; AVX-NEXT: vmovaps 1200(%rdi), %xmm2
4890 ; AVX-NEXT: vmovaps 1152(%rdi), %xmm1
4891 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
4892 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4893 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4894 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4895 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4896 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4897 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4898 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4899 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4900 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4901 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4902 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4903 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4904 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4905 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4906 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4907 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4908 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4909 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4910 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4911 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4912 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4913 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4914 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4915 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4916 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4917 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4918 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4919 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4920 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4921 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4922 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
4923 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
4924 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
4925 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4926 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4927 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4928 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4929 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm14[1],xmm13[1]
4930 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
4931 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4932 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
4933 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
4934 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
4935 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4936 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
4937 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
4938 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
4939 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4940 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
4941 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
4942 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4943 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4944 ; AVX-NEXT: vmovaps 288(%rdi), %ymm1
4945 ; AVX-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
4946 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm0
4947 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4948 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4949 ; AVX-NEXT: vmovaps 256(%rdi), %xmm2
4950 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4951 ; AVX-NEXT: vmovaps 208(%rdi), %xmm1
4952 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4953 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4954 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4955 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4956 ; AVX-NEXT: vmovaps 672(%rdi), %ymm1
4957 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4958 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm0
4959 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4960 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4961 ; AVX-NEXT: vmovaps 640(%rdi), %xmm2
4962 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4963 ; AVX-NEXT: vmovaps 592(%rdi), %xmm1
4964 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4965 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4966 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4967 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4968 ; AVX-NEXT: vmovaps 1056(%rdi), %ymm1
4969 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4970 ; AVX-NEXT: vinsertf128 $1, 1120(%rdi), %ymm0, %ymm0
4971 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4972 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4973 ; AVX-NEXT: vmovaps 1024(%rdi), %xmm2
4974 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4975 ; AVX-NEXT: vmovaps 976(%rdi), %xmm1
4976 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4977 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4978 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4979 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4980 ; AVX-NEXT: vmovaps 1440(%rdi), %ymm1
4981 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4982 ; AVX-NEXT: vinsertf128 $1, 1504(%rdi), %ymm0, %ymm0
4983 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4984 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4985 ; AVX-NEXT: vmovaps 1408(%rdi), %xmm14
4986 ; AVX-NEXT: vmovaps 1360(%rdi), %xmm12
4987 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm14[0]
4988 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4989 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4990 ; AVX-NEXT: vmovaps 96(%rdi), %ymm1
4991 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4992 ; AVX-NEXT: vmovaps 16(%rdi), %xmm2
4993 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4994 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm0
4995 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4996 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
4997 ; AVX-NEXT: vmovaps 64(%rdi), %xmm1
4998 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4999 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
5000 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5001 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5002 ; AVX-NEXT: vmovaps 480(%rdi), %ymm11
5003 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm10
5004 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
5005 ; AVX-NEXT: vmovaps 448(%rdi), %xmm9
5006 ; AVX-NEXT: vmovaps 400(%rdi), %xmm8
5007 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
5008 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5009 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5010 ; AVX-NEXT: vmovaps 864(%rdi), %ymm7
5011 ; AVX-NEXT: vinsertf128 $1, 928(%rdi), %ymm0, %ymm6
5012 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm6[0],ymm7[2],ymm6[2]
5013 ; AVX-NEXT: vmovaps 832(%rdi), %xmm5
5014 ; AVX-NEXT: vmovaps 784(%rdi), %xmm4
5015 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm4[0],xmm5[0]
5016 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
5017 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5018 ; AVX-NEXT: vmovaps 1248(%rdi), %ymm3
5019 ; AVX-NEXT: vinsertf128 $1, 1312(%rdi), %ymm0, %ymm13
5020 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm13[0],ymm3[2],ymm13[2]
5021 ; AVX-NEXT: vmovaps 1216(%rdi), %xmm2
5022 ; AVX-NEXT: vmovaps 1168(%rdi), %xmm1
5023 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5024 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5025 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5026 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5027 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5028 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5029 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5030 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5031 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5032 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5033 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5034 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5035 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5036 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5037 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5038 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5039 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5040 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5041 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
5042 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5043 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5044 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5045 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5046 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5047 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5048 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5049 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5050 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5051 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5052 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5053 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm12 = xmm12[1],xmm14[1]
5054 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm0[4,5,6,7]
5055 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5056 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm13[1],ymm3[3],ymm13[3]
5057 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5058 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5059 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5060 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm6[1],ymm7[3],ymm6[3]
5061 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm5[1]
5062 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5063 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5064 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
5065 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm9[1]
5066 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5067 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5068 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5069 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5070 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5071 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5072 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
5073 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
5074 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5075 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5076 ; AVX-NEXT: vmovaps 160(%rdi), %ymm1
5077 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5078 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
5079 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5080 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm0
5081 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5082 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5083 ; AVX-NEXT: vmovaps 80(%rdi), %xmm1
5084 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5085 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
5086 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5087 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5088 ; AVX-NEXT: vmovaps 352(%rdi), %ymm1
5089 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5090 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm0
5091 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5092 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5093 ; AVX-NEXT: vmovaps 272(%rdi), %xmm2
5094 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5095 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
5096 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5097 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5098 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5099 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5100 ; AVX-NEXT: vmovaps 544(%rdi), %ymm1
5101 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5102 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm0
5103 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5104 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5105 ; AVX-NEXT: vmovaps 464(%rdi), %xmm2
5106 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5107 ; AVX-NEXT: vmovaps 416(%rdi), %xmm1
5108 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5109 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5110 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5111 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5112 ; AVX-NEXT: vmovaps 736(%rdi), %ymm1
5113 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5114 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm0
5115 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5116 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5117 ; AVX-NEXT: vmovaps 656(%rdi), %xmm2
5118 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5119 ; AVX-NEXT: vmovaps 608(%rdi), %xmm1
5120 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5121 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
5122 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5123 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5124 ; AVX-NEXT: vmovaps 928(%rdi), %ymm1
5125 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5126 ; AVX-NEXT: vinsertf128 $1, 896(%rdi), %ymm0, %ymm0
5127 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5128 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5129 ; AVX-NEXT: vmovaps 848(%rdi), %xmm14
5130 ; AVX-NEXT: vmovaps 800(%rdi), %xmm13
5131 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
5132 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5133 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5134 ; AVX-NEXT: vmovaps 1120(%rdi), %ymm12
5135 ; AVX-NEXT: vinsertf128 $1, 1088(%rdi), %ymm0, %ymm11
5136 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
5137 ; AVX-NEXT: vmovaps 1040(%rdi), %xmm10
5138 ; AVX-NEXT: vmovaps 992(%rdi), %xmm9
5139 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
5140 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5141 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5142 ; AVX-NEXT: vmovaps 1312(%rdi), %ymm8
5143 ; AVX-NEXT: vinsertf128 $1, 1280(%rdi), %ymm0, %ymm7
5144 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
5145 ; AVX-NEXT: vmovaps 1232(%rdi), %xmm6
5146 ; AVX-NEXT: vmovaps 1184(%rdi), %xmm5
5147 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
5148 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5149 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5150 ; AVX-NEXT: vmovaps 1504(%rdi), %ymm4
5151 ; AVX-NEXT: vinsertf128 $1, 1472(%rdi), %ymm0, %ymm3
5152 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
5153 ; AVX-NEXT: vmovaps 1424(%rdi), %xmm2
5154 ; AVX-NEXT: vmovaps 1376(%rdi), %xmm1
5155 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5156 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5157 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5158 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5159 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5160 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5161 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5162 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5163 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5164 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5165 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5166 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5167 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5168 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5169 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5170 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5171 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5172 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5173 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5174 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5175 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5176 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5177 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5178 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5179 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5180 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5181 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5182 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5183 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5184 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5185 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5186 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
5187 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
5188 ; AVX-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5189 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5190 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5191 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5192 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
5193 ; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
5194 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
5195 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
5196 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5197 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
5198 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
5199 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
5200 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
5201 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5202 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
5203 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5204 ; AVX-NEXT: vmovaps %ymm2, 192(%rsi)
5205 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5206 ; AVX-NEXT: vmovaps %ymm2, 128(%rsi)
5207 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5208 ; AVX-NEXT: vmovaps %ymm2, 64(%rsi)
5209 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5210 ; AVX-NEXT: vmovaps %ymm2, (%rsi)
5211 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5212 ; AVX-NEXT: vmovaps %ymm2, 224(%rsi)
5213 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5214 ; AVX-NEXT: vmovaps %ymm2, 160(%rsi)
5215 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5216 ; AVX-NEXT: vmovaps %ymm2, 96(%rsi)
5217 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5218 ; AVX-NEXT: vmovaps %ymm2, 32(%rsi)
5219 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5220 ; AVX-NEXT: vmovaps %ymm2, 192(%rdx)
5221 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5222 ; AVX-NEXT: vmovaps %ymm2, 128(%rdx)
5223 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5224 ; AVX-NEXT: vmovaps %ymm2, 64(%rdx)
5225 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5226 ; AVX-NEXT: vmovaps %ymm2, (%rdx)
5227 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5228 ; AVX-NEXT: vmovaps %ymm2, 224(%rdx)
5229 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5230 ; AVX-NEXT: vmovaps %ymm2, 160(%rdx)
5231 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5232 ; AVX-NEXT: vmovaps %ymm2, 96(%rdx)
5233 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5234 ; AVX-NEXT: vmovaps %ymm2, 32(%rdx)
5235 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5236 ; AVX-NEXT: vmovaps %ymm2, 192(%rcx)
5237 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5238 ; AVX-NEXT: vmovaps %ymm2, 128(%rcx)
5239 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5240 ; AVX-NEXT: vmovaps %ymm2, 64(%rcx)
5241 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5242 ; AVX-NEXT: vmovaps %ymm2, (%rcx)
5243 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5244 ; AVX-NEXT: vmovaps %ymm2, 224(%rcx)
5245 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5246 ; AVX-NEXT: vmovaps %ymm2, 160(%rcx)
5247 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5248 ; AVX-NEXT: vmovaps %ymm2, 96(%rcx)
5249 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5250 ; AVX-NEXT: vmovaps %ymm2, 32(%rcx)
5251 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5252 ; AVX-NEXT: vmovaps %ymm2, (%r8)
5253 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5254 ; AVX-NEXT: vmovaps %ymm2, 64(%r8)
5255 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5256 ; AVX-NEXT: vmovaps %ymm2, 128(%r8)
5257 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5258 ; AVX-NEXT: vmovaps %ymm2, 192(%r8)
5259 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5260 ; AVX-NEXT: vmovaps %ymm2, 224(%r8)
5261 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5262 ; AVX-NEXT: vmovaps %ymm2, 160(%r8)
5263 ; AVX-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
5264 ; AVX-NEXT: vmovaps %ymm2, 96(%r8)
5265 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5266 ; AVX-NEXT: vmovaps %ymm2, 32(%r8)
5267 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5268 ; AVX-NEXT: vmovaps %ymm2, 224(%r9)
5269 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5270 ; AVX-NEXT: vmovaps %ymm2, 192(%r9)
5271 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5272 ; AVX-NEXT: vmovaps %ymm2, 160(%r9)
5273 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5274 ; AVX-NEXT: vmovaps %ymm2, 128(%r9)
5275 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5276 ; AVX-NEXT: vmovaps %ymm2, 96(%r9)
5277 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5278 ; AVX-NEXT: vmovaps %ymm2, 64(%r9)
5279 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5280 ; AVX-NEXT: vmovaps %ymm2, 32(%r9)
5281 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
5282 ; AVX-NEXT: vmovaps %ymm2, (%r9)
5283 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
5284 ; AVX-NEXT: vmovaps %ymm1, 224(%rax)
5285 ; AVX-NEXT: vmovaps %ymm0, 192(%rax)
5286 ; AVX-NEXT: vmovaps %ymm9, 160(%rax)
5287 ; AVX-NEXT: vmovaps %ymm13, 128(%rax)
5288 ; AVX-NEXT: vmovaps %ymm15, 96(%rax)
5289 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5290 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
5291 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5292 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
5293 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5294 ; AVX-NEXT: vmovaps %ymm0, (%rax)
5295 ; AVX-NEXT: addq $1624, %rsp # imm = 0x658
5296 ; AVX-NEXT: vzeroupper
5299 ; AVX2-LABEL: load_i64_stride6_vf32:
5301 ; AVX2-NEXT: subq $1496, %rsp # imm = 0x5D8
5302 ; AVX2-NEXT: vmovaps 1088(%rdi), %ymm2
5303 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5304 ; AVX2-NEXT: vmovaps 1056(%rdi), %ymm4
5305 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5306 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm3
5307 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5308 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm5
5309 ; AVX2-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
5310 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm1
5311 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5312 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm7
5313 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5314 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm0
5315 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5316 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm6
5317 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
5318 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
5319 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5320 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5321 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5322 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm1
5323 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5324 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
5325 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5326 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5327 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5328 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5329 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5330 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5331 ; AVX2-NEXT: vmovaps 1008(%rdi), %xmm1
5332 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5333 ; AVX2-NEXT: vmovaps 960(%rdi), %xmm0
5334 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5335 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5336 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
5337 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5338 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5339 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5340 ; AVX2-NEXT: vmovaps 1472(%rdi), %ymm0
5341 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5342 ; AVX2-NEXT: vmovaps 1440(%rdi), %ymm1
5343 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5344 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5345 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5346 ; AVX2-NEXT: vmovaps 1392(%rdi), %xmm1
5347 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5348 ; AVX2-NEXT: vmovaps 1344(%rdi), %xmm4
5349 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
5350 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5351 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5352 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm15
5353 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm0
5354 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5355 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
5356 ; AVX2-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5357 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5358 ; AVX2-NEXT: vmovaps (%rdi), %xmm8
5359 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm2
5360 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
5361 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5362 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5363 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm7
5364 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm0
5365 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5366 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5367 ; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5368 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5369 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm11
5370 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm12
5371 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
5372 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5373 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5374 ; AVX2-NEXT: vmovaps 896(%rdi), %ymm5
5375 ; AVX2-NEXT: vmovaps 864(%rdi), %ymm0
5376 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5377 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
5378 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5379 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5380 ; AVX2-NEXT: vmovaps 816(%rdi), %xmm13
5381 ; AVX2-NEXT: vmovaps 768(%rdi), %xmm10
5382 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
5383 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
5384 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5385 ; AVX2-NEXT: vmovaps 1280(%rdi), %ymm3
5386 ; AVX2-NEXT: vmovaps 1248(%rdi), %ymm0
5387 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5388 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5389 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5390 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5391 ; AVX2-NEXT: vmovaps 1200(%rdi), %xmm14
5392 ; AVX2-NEXT: vmovaps 1152(%rdi), %xmm1
5393 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
5394 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5395 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5396 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
5397 ; AVX2-NEXT: # xmm0 = xmm6[1],mem[1]
5398 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm6
5399 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5400 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5401 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5402 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5403 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5404 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5405 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5406 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm6
5407 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5408 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5409 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5410 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5411 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5412 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5413 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5414 ; AVX2-NEXT: vbroadcastsd 1064(%rdi), %ymm6
5415 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5416 ; AVX2-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5417 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5418 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5419 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
5420 ; AVX2-NEXT: # xmm0 = xmm4[1],mem[1]
5421 ; AVX2-NEXT: vbroadcastsd 1448(%rdi), %ymm4
5422 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
5423 ; AVX2-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
5424 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
5425 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5426 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
5427 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm2
5428 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
5429 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5430 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5431 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
5432 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm2
5433 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
5434 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5435 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5436 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
5437 ; AVX2-NEXT: vbroadcastsd 872(%rdi), %ymm2
5438 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
5439 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5440 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5441 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
5442 ; AVX2-NEXT: vbroadcastsd 1256(%rdi), %ymm1
5443 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5444 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5445 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5446 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
5447 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5448 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5449 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm3
5450 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm5
5451 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
5452 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5453 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5454 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
5455 ; AVX2-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5456 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5457 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm6
5458 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm7
5459 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
5460 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5461 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5462 ; AVX2-NEXT: vbroadcastsd 1120(%rdi), %ymm0
5463 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5464 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5465 ; AVX2-NEXT: vmovaps 1024(%rdi), %xmm8
5466 ; AVX2-NEXT: vmovaps 976(%rdi), %xmm9
5467 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
5468 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5469 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5470 ; AVX2-NEXT: vbroadcastsd 1504(%rdi), %ymm0
5471 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5472 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5473 ; AVX2-NEXT: vmovaps 1408(%rdi), %xmm10
5474 ; AVX2-NEXT: vmovaps 1360(%rdi), %xmm11
5475 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
5476 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5477 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5478 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
5479 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5480 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5481 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm0
5482 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5483 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm2
5484 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5485 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
5486 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
5487 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5488 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm1
5489 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5490 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5491 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm0
5492 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5493 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm1
5494 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5495 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
5496 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
5497 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5498 ; AVX2-NEXT: vbroadcastsd 928(%rdi), %ymm12
5499 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5500 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
5501 ; AVX2-NEXT: vmovaps 832(%rdi), %xmm12
5502 ; AVX2-NEXT: vmovaps 784(%rdi), %xmm13
5503 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
5504 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
5505 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5506 ; AVX2-NEXT: vbroadcastsd 1312(%rdi), %ymm14
5507 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5508 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
5509 ; AVX2-NEXT: vmovaps 1216(%rdi), %xmm15
5510 ; AVX2-NEXT: vmovaps 1168(%rdi), %xmm0
5511 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
5512 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
5513 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5514 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
5515 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm5
5516 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5517 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
5518 ; AVX2-NEXT: vmovaps %ymm5, %ymm14
5519 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5520 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5521 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5522 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5523 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
5524 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm5
5525 ; AVX2-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
5526 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
5527 ; AVX2-NEXT: vmovaps %ymm5, %ymm7
5528 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5529 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5530 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5531 ; AVX2-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
5532 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
5533 ; AVX2-NEXT: vmovaps 1120(%rdi), %ymm5
5534 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5535 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
5536 ; AVX2-NEXT: vmovaps %ymm5, %ymm6
5537 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5538 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5539 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5540 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5541 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
5542 ; AVX2-NEXT: vmovaps 1504(%rdi), %ymm5
5543 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5544 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5545 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
5546 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
5547 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5548 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5549 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
5550 ; AVX2-NEXT: vmovaps 1312(%rdi), %ymm3
5551 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5552 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5553 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
5554 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5555 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5556 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
5557 ; AVX2-NEXT: vmovaps 928(%rdi), %ymm1
5558 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
5559 ; AVX2-NEXT: vmovaps %ymm1, %ymm4
5560 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5561 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
5562 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5563 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5564 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5565 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5566 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5567 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm2
5568 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5569 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5570 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5571 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
5572 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5573 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5574 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5575 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5576 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
5577 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm9
5578 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5579 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
5580 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
5581 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5582 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5583 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5584 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
5585 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5586 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm5
5587 ; AVX2-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5588 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm1
5589 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5590 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
5591 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5592 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5593 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5594 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
5595 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5596 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm5
5597 ; AVX2-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5598 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
5599 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5600 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
5601 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5602 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5603 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5604 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
5605 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5606 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm14
5607 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm13
5608 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
5609 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5610 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5611 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5612 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5613 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5614 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm12
5615 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm11
5616 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
5617 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5618 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5619 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5620 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
5621 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5622 ; AVX2-NEXT: vmovaps 848(%rdi), %xmm10
5623 ; AVX2-NEXT: vmovaps 800(%rdi), %xmm7
5624 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
5625 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5626 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5627 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5628 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
5629 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5630 ; AVX2-NEXT: vmovaps 1040(%rdi), %xmm8
5631 ; AVX2-NEXT: vmovaps 992(%rdi), %xmm5
5632 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
5633 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5634 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5635 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5636 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5637 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5638 ; AVX2-NEXT: vmovaps 1232(%rdi), %xmm6
5639 ; AVX2-NEXT: vmovaps 1184(%rdi), %xmm3
5640 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
5641 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5642 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5643 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5644 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5645 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
5646 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5647 ; AVX2-NEXT: vmovaps 1424(%rdi), %xmm2
5648 ; AVX2-NEXT: vmovaps 1376(%rdi), %xmm1
5649 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
5650 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
5651 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5652 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
5653 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
5654 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5655 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
5656 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
5657 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5658 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5659 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm0
5660 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5661 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5662 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5663 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
5664 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
5665 ; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5666 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
5667 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5668 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5669 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
5670 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5671 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm0
5672 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5673 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5674 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
5675 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5676 ; AVX2-NEXT: vbroadcastsd 904(%rdi), %ymm0
5677 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5678 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5679 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
5680 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
5681 ; AVX2-NEXT: vbroadcastsd 1096(%rdi), %ymm0
5682 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5683 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5684 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
5685 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
5686 ; AVX2-NEXT: vbroadcastsd 1288(%rdi), %ymm0
5687 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
5688 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5689 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
5690 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
5691 ; AVX2-NEXT: vbroadcastsd 1480(%rdi), %ymm0
5692 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
5693 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5694 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5695 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5696 ; AVX2-NEXT: vmovaps %ymm1, 192(%rsi)
5697 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5698 ; AVX2-NEXT: vmovaps %ymm1, 128(%rsi)
5699 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5700 ; AVX2-NEXT: vmovaps %ymm1, 64(%rsi)
5701 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5702 ; AVX2-NEXT: vmovaps %ymm1, (%rsi)
5703 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5704 ; AVX2-NEXT: vmovaps %ymm1, 224(%rsi)
5705 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5706 ; AVX2-NEXT: vmovaps %ymm1, 160(%rsi)
5707 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5708 ; AVX2-NEXT: vmovaps %ymm1, 96(%rsi)
5709 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5710 ; AVX2-NEXT: vmovaps %ymm1, 32(%rsi)
5711 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5712 ; AVX2-NEXT: vmovaps %ymm1, 192(%rdx)
5713 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5714 ; AVX2-NEXT: vmovaps %ymm1, 128(%rdx)
5715 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5716 ; AVX2-NEXT: vmovaps %ymm1, 64(%rdx)
5717 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5718 ; AVX2-NEXT: vmovaps %ymm1, (%rdx)
5719 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5720 ; AVX2-NEXT: vmovaps %ymm1, 224(%rdx)
5721 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5722 ; AVX2-NEXT: vmovaps %ymm1, 160(%rdx)
5723 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5724 ; AVX2-NEXT: vmovaps %ymm1, 96(%rdx)
5725 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5726 ; AVX2-NEXT: vmovaps %ymm1, 32(%rdx)
5727 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5728 ; AVX2-NEXT: vmovaps %ymm1, 192(%rcx)
5729 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5730 ; AVX2-NEXT: vmovaps %ymm1, 128(%rcx)
5731 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5732 ; AVX2-NEXT: vmovaps %ymm1, 64(%rcx)
5733 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5734 ; AVX2-NEXT: vmovaps %ymm1, (%rcx)
5735 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5736 ; AVX2-NEXT: vmovaps %ymm1, 224(%rcx)
5737 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5738 ; AVX2-NEXT: vmovaps %ymm1, 160(%rcx)
5739 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5740 ; AVX2-NEXT: vmovaps %ymm1, 96(%rcx)
5741 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5742 ; AVX2-NEXT: vmovaps %ymm1, 32(%rcx)
5743 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5744 ; AVX2-NEXT: vmovaps %ymm1, (%r8)
5745 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5746 ; AVX2-NEXT: vmovaps %ymm1, 64(%r8)
5747 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5748 ; AVX2-NEXT: vmovaps %ymm1, 128(%r8)
5749 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5750 ; AVX2-NEXT: vmovaps %ymm1, 192(%r8)
5751 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5752 ; AVX2-NEXT: vmovaps %ymm1, 224(%r8)
5753 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5754 ; AVX2-NEXT: vmovaps %ymm1, 160(%r8)
5755 ; AVX2-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5756 ; AVX2-NEXT: vmovaps %ymm1, 96(%r8)
5757 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5758 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
5759 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5760 ; AVX2-NEXT: vmovaps %ymm1, 224(%r9)
5761 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5762 ; AVX2-NEXT: vmovaps %ymm1, 192(%r9)
5763 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5764 ; AVX2-NEXT: vmovaps %ymm1, 160(%r9)
5765 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5766 ; AVX2-NEXT: vmovaps %ymm1, 128(%r9)
5767 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5768 ; AVX2-NEXT: vmovaps %ymm1, 96(%r9)
5769 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5770 ; AVX2-NEXT: vmovaps %ymm1, 64(%r9)
5771 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5772 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
5773 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5774 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
5775 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
5776 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
5777 ; AVX2-NEXT: vmovaps %ymm3, 192(%rax)
5778 ; AVX2-NEXT: vmovaps %ymm5, 160(%rax)
5779 ; AVX2-NEXT: vmovaps %ymm7, 128(%rax)
5780 ; AVX2-NEXT: vmovaps %ymm9, 96(%rax)
5781 ; AVX2-NEXT: vmovaps %ymm13, 64(%rax)
5782 ; AVX2-NEXT: vmovaps %ymm15, 32(%rax)
5783 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5784 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
5785 ; AVX2-NEXT: addq $1496, %rsp # imm = 0x5D8
5786 ; AVX2-NEXT: vzeroupper
5789 ; AVX2-FP-LABEL: load_i64_stride6_vf32:
5791 ; AVX2-FP-NEXT: subq $1496, %rsp # imm = 0x5D8
5792 ; AVX2-FP-NEXT: vmovaps 1088(%rdi), %ymm2
5793 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5794 ; AVX2-FP-NEXT: vmovaps 1056(%rdi), %ymm4
5795 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5796 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm3
5797 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5798 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm5
5799 ; AVX2-FP-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
5800 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm1
5801 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5802 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm7
5803 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5804 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm0
5805 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5806 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm6
5807 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
5808 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
5809 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5810 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5811 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5812 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm1
5813 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5814 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
5815 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5816 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5817 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5818 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5819 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5820 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5821 ; AVX2-FP-NEXT: vmovaps 1008(%rdi), %xmm1
5822 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5823 ; AVX2-FP-NEXT: vmovaps 960(%rdi), %xmm0
5824 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5825 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
5826 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
5827 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
5828 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5829 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5830 ; AVX2-FP-NEXT: vmovaps 1472(%rdi), %ymm0
5831 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5832 ; AVX2-FP-NEXT: vmovaps 1440(%rdi), %ymm1
5833 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5834 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5835 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5836 ; AVX2-FP-NEXT: vmovaps 1392(%rdi), %xmm1
5837 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5838 ; AVX2-FP-NEXT: vmovaps 1344(%rdi), %xmm4
5839 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
5840 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5841 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5842 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm15
5843 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm0
5844 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5845 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
5846 ; AVX2-FP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5847 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5848 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm8
5849 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm2
5850 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
5851 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5852 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5853 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm7
5854 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm0
5855 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5856 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
5857 ; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5858 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5859 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm11
5860 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm12
5861 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
5862 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
5863 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5864 ; AVX2-FP-NEXT: vmovaps 896(%rdi), %ymm5
5865 ; AVX2-FP-NEXT: vmovaps 864(%rdi), %ymm0
5866 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5867 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
5868 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5869 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5870 ; AVX2-FP-NEXT: vmovaps 816(%rdi), %xmm13
5871 ; AVX2-FP-NEXT: vmovaps 768(%rdi), %xmm10
5872 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
5873 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
5874 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5875 ; AVX2-FP-NEXT: vmovaps 1280(%rdi), %ymm3
5876 ; AVX2-FP-NEXT: vmovaps 1248(%rdi), %ymm0
5877 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5878 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5879 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5880 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
5881 ; AVX2-FP-NEXT: vmovaps 1200(%rdi), %xmm14
5882 ; AVX2-FP-NEXT: vmovaps 1152(%rdi), %xmm1
5883 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
5884 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
5885 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5886 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
5887 ; AVX2-FP-NEXT: # xmm0 = xmm6[1],mem[1]
5888 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm6
5889 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5890 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5891 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5892 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5893 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5894 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5895 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
5896 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm6
5897 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5898 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5899 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5900 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5901 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5902 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
5903 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
5904 ; AVX2-FP-NEXT: vbroadcastsd 1064(%rdi), %ymm6
5905 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
5906 ; AVX2-FP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
5907 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
5908 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5909 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
5910 ; AVX2-FP-NEXT: # xmm0 = xmm4[1],mem[1]
5911 ; AVX2-FP-NEXT: vbroadcastsd 1448(%rdi), %ymm4
5912 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
5913 ; AVX2-FP-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
5914 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
5915 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5916 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
5917 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm2
5918 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
5919 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5920 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5921 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
5922 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm2
5923 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
5924 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5925 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5926 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
5927 ; AVX2-FP-NEXT: vbroadcastsd 872(%rdi), %ymm2
5928 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
5929 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5930 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5931 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
5932 ; AVX2-FP-NEXT: vbroadcastsd 1256(%rdi), %ymm1
5933 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
5934 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5935 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5936 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
5937 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5938 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5939 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm3
5940 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm5
5941 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
5942 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5943 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5944 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
5945 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
5946 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5947 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm6
5948 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm7
5949 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
5950 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5951 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5952 ; AVX2-FP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
5953 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5954 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5955 ; AVX2-FP-NEXT: vmovaps 1024(%rdi), %xmm8
5956 ; AVX2-FP-NEXT: vmovaps 976(%rdi), %xmm9
5957 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
5958 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5959 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5960 ; AVX2-FP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
5961 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5962 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5963 ; AVX2-FP-NEXT: vmovaps 1408(%rdi), %xmm10
5964 ; AVX2-FP-NEXT: vmovaps 1360(%rdi), %xmm11
5965 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
5966 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5967 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5968 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
5969 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5970 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5971 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm0
5972 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5973 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm2
5974 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5975 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
5976 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
5977 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5978 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm1
5979 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5980 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5981 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm0
5982 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5983 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm1
5984 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5985 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
5986 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
5987 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5988 ; AVX2-FP-NEXT: vbroadcastsd 928(%rdi), %ymm12
5989 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
5990 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
5991 ; AVX2-FP-NEXT: vmovaps 832(%rdi), %xmm12
5992 ; AVX2-FP-NEXT: vmovaps 784(%rdi), %xmm13
5993 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
5994 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
5995 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5996 ; AVX2-FP-NEXT: vbroadcastsd 1312(%rdi), %ymm14
5997 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5998 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
5999 ; AVX2-FP-NEXT: vmovaps 1216(%rdi), %xmm15
6000 ; AVX2-FP-NEXT: vmovaps 1168(%rdi), %xmm0
6001 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
6002 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
6003 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6004 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
6005 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm5
6006 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6007 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6008 ; AVX2-FP-NEXT: vmovaps %ymm5, %ymm14
6009 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6010 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6011 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6012 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6013 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
6014 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm5
6015 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
6016 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6017 ; AVX2-FP-NEXT: vmovaps %ymm5, %ymm7
6018 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6019 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6020 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6021 ; AVX2-FP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
6022 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
6023 ; AVX2-FP-NEXT: vmovaps 1120(%rdi), %ymm5
6024 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6025 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6026 ; AVX2-FP-NEXT: vmovaps %ymm5, %ymm6
6027 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6028 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6029 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6030 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6031 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
6032 ; AVX2-FP-NEXT: vmovaps 1504(%rdi), %ymm5
6033 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6034 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6035 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6036 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6037 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6038 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6039 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
6040 ; AVX2-FP-NEXT: vmovaps 1312(%rdi), %ymm3
6041 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6042 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6043 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6044 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6045 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6046 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
6047 ; AVX2-FP-NEXT: vmovaps 928(%rdi), %ymm1
6048 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
6049 ; AVX2-FP-NEXT: vmovaps %ymm1, %ymm4
6050 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6051 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6052 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6053 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6054 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6055 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6056 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
6057 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm2
6058 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6059 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
6060 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6061 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6062 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6063 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6064 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6065 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6066 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
6067 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm9
6068 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6069 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
6070 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6071 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6072 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6073 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6074 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
6075 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6076 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm5
6077 ; AVX2-FP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6078 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm1
6079 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6080 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
6081 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6082 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6083 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6084 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
6085 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6086 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm5
6087 ; AVX2-FP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6088 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
6089 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6090 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
6091 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6092 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6093 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6094 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
6095 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6096 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm14
6097 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm13
6098 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
6099 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6100 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6101 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6102 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6103 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6104 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm12
6105 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm11
6106 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
6107 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6108 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6109 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6110 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6111 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6112 ; AVX2-FP-NEXT: vmovaps 848(%rdi), %xmm10
6113 ; AVX2-FP-NEXT: vmovaps 800(%rdi), %xmm7
6114 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
6115 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6116 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6117 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6118 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
6119 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6120 ; AVX2-FP-NEXT: vmovaps 1040(%rdi), %xmm8
6121 ; AVX2-FP-NEXT: vmovaps 992(%rdi), %xmm5
6122 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
6123 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6124 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6125 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6126 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6127 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6128 ; AVX2-FP-NEXT: vmovaps 1232(%rdi), %xmm6
6129 ; AVX2-FP-NEXT: vmovaps 1184(%rdi), %xmm3
6130 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
6131 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6132 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6133 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6134 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6135 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6136 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6137 ; AVX2-FP-NEXT: vmovaps 1424(%rdi), %xmm2
6138 ; AVX2-FP-NEXT: vmovaps 1376(%rdi), %xmm1
6139 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
6140 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
6141 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6142 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
6143 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
6144 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6145 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6146 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
6147 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6148 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6149 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm0
6150 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6151 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6152 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6153 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6154 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
6155 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6156 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
6157 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6158 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6159 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
6160 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6161 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm0
6162 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6163 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6164 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
6165 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6166 ; AVX2-FP-NEXT: vbroadcastsd 904(%rdi), %ymm0
6167 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6168 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6169 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
6170 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
6171 ; AVX2-FP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
6172 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6173 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6174 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
6175 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
6176 ; AVX2-FP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
6177 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6178 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6179 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
6180 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
6181 ; AVX2-FP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
6182 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
6183 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
6184 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6185 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6186 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rsi)
6187 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6188 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rsi)
6189 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6190 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rsi)
6191 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6192 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rsi)
6193 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6194 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rsi)
6195 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6196 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rsi)
6197 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6198 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rsi)
6199 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6200 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rsi)
6201 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6202 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rdx)
6203 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6204 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rdx)
6205 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6206 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rdx)
6207 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6208 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rdx)
6209 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6210 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rdx)
6211 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6212 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rdx)
6213 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6214 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rdx)
6215 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6216 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rdx)
6217 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6218 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rcx)
6219 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6220 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rcx)
6221 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6222 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rcx)
6223 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6224 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rcx)
6225 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6226 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rcx)
6227 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6228 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rcx)
6229 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6230 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rcx)
6231 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6232 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rcx)
6233 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6234 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r8)
6235 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6236 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r8)
6237 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6238 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r8)
6239 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6240 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r8)
6241 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6242 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r8)
6243 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6244 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r8)
6245 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6246 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r8)
6247 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6248 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r8)
6249 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6250 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r9)
6251 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6252 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r9)
6253 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6254 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r9)
6255 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6256 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r9)
6257 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6258 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r9)
6259 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6260 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r9)
6261 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6262 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
6263 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6264 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
6265 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6266 ; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
6267 ; AVX2-FP-NEXT: vmovaps %ymm3, 192(%rax)
6268 ; AVX2-FP-NEXT: vmovaps %ymm5, 160(%rax)
6269 ; AVX2-FP-NEXT: vmovaps %ymm7, 128(%rax)
6270 ; AVX2-FP-NEXT: vmovaps %ymm9, 96(%rax)
6271 ; AVX2-FP-NEXT: vmovaps %ymm13, 64(%rax)
6272 ; AVX2-FP-NEXT: vmovaps %ymm15, 32(%rax)
6273 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6274 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
6275 ; AVX2-FP-NEXT: addq $1496, %rsp # imm = 0x5D8
6276 ; AVX2-FP-NEXT: vzeroupper
6277 ; AVX2-FP-NEXT: retq
6279 ; AVX2-FCP-LABEL: load_i64_stride6_vf32:
6280 ; AVX2-FCP: # %bb.0:
6281 ; AVX2-FCP-NEXT: subq $1496, %rsp # imm = 0x5D8
6282 ; AVX2-FCP-NEXT: vmovaps 1088(%rdi), %ymm2
6283 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6284 ; AVX2-FCP-NEXT: vmovaps 1056(%rdi), %ymm4
6285 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6286 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm3
6287 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6288 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm5
6289 ; AVX2-FCP-NEXT: vmovups %ymm5, (%rsp) # 32-byte Spill
6290 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm1
6291 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6292 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm7
6293 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6294 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm0
6295 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6296 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm6
6297 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm6[0],xmm0[0]
6298 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
6299 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6300 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6301 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6302 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm1
6303 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6304 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
6305 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6306 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
6307 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
6308 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6309 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6310 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6311 ; AVX2-FCP-NEXT: vmovaps 1008(%rdi), %xmm1
6312 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6313 ; AVX2-FCP-NEXT: vmovaps 960(%rdi), %xmm0
6314 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6315 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
6316 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
6317 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
6318 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6319 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6320 ; AVX2-FCP-NEXT: vmovaps 1472(%rdi), %ymm0
6321 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6322 ; AVX2-FCP-NEXT: vmovaps 1440(%rdi), %ymm1
6323 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6324 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6325 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6326 ; AVX2-FCP-NEXT: vmovaps 1392(%rdi), %xmm1
6327 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6328 ; AVX2-FCP-NEXT: vmovaps 1344(%rdi), %xmm4
6329 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm1[0]
6330 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6331 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6332 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm15
6333 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm0
6334 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6335 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
6336 ; AVX2-FCP-NEXT: vmovups %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6337 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6338 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm8
6339 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm2
6340 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm8[0],xmm2[0]
6341 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
6342 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6343 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm7
6344 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm0
6345 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6346 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6347 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6348 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6349 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm11
6350 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm12
6351 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm10 = xmm12[0],xmm11[0]
6352 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
6353 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6354 ; AVX2-FCP-NEXT: vmovaps 896(%rdi), %ymm5
6355 ; AVX2-FCP-NEXT: vmovaps 864(%rdi), %ymm0
6356 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6357 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
6358 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6359 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6360 ; AVX2-FCP-NEXT: vmovaps 816(%rdi), %xmm13
6361 ; AVX2-FCP-NEXT: vmovaps 768(%rdi), %xmm10
6362 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm13[0]
6363 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
6364 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6365 ; AVX2-FCP-NEXT: vmovaps 1280(%rdi), %ymm3
6366 ; AVX2-FCP-NEXT: vmovaps 1248(%rdi), %ymm0
6367 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6368 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6369 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6370 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6371 ; AVX2-FCP-NEXT: vmovaps 1200(%rdi), %xmm14
6372 ; AVX2-FCP-NEXT: vmovaps 1152(%rdi), %xmm1
6373 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm9 = xmm1[0],xmm14[0]
6374 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6375 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6376 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
6377 ; AVX2-FCP-NEXT: # xmm0 = xmm6[1],mem[1]
6378 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm6
6379 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6380 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6381 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6382 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6383 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6384 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6385 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6386 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm6
6387 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6388 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6389 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6390 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6391 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6392 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6393 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6394 ; AVX2-FCP-NEXT: vbroadcastsd 1064(%rdi), %ymm6
6395 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6396 ; AVX2-FCP-NEXT: # ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
6397 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
6398 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6399 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm0 # 16-byte Folded Reload
6400 ; AVX2-FCP-NEXT: # xmm0 = xmm4[1],mem[1]
6401 ; AVX2-FCP-NEXT: vbroadcastsd 1448(%rdi), %ymm4
6402 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
6403 ; AVX2-FCP-NEXT: # ymm4 = ymm4[1],mem[1],ymm4[3],mem[3]
6404 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
6405 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6406 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm2[1]
6407 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm2
6408 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm15[1],ymm2[3],ymm15[3]
6409 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6410 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6411 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm12[1],xmm11[1]
6412 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm2
6413 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
6414 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6415 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6416 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm13[1]
6417 ; AVX2-FCP-NEXT: vbroadcastsd 872(%rdi), %ymm2
6418 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
6419 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6420 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6421 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm14[1]
6422 ; AVX2-FCP-NEXT: vbroadcastsd 1256(%rdi), %ymm1
6423 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6424 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6425 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6426 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
6427 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6428 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6429 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm3
6430 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm5
6431 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm3[0]
6432 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6433 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6434 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
6435 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6436 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6437 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm6
6438 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm7
6439 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm6[0]
6440 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6441 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6442 ; AVX2-FCP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
6443 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6444 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6445 ; AVX2-FCP-NEXT: vmovaps 1024(%rdi), %xmm8
6446 ; AVX2-FCP-NEXT: vmovaps 976(%rdi), %xmm9
6447 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm8[0]
6448 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6449 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6450 ; AVX2-FCP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
6451 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6452 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6453 ; AVX2-FCP-NEXT: vmovaps 1408(%rdi), %xmm10
6454 ; AVX2-FCP-NEXT: vmovaps 1360(%rdi), %xmm11
6455 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm10[0]
6456 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6457 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6458 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
6459 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6460 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6461 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm0
6462 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6463 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm2
6464 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6465 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm4 = xmm0[0],xmm2[0]
6466 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm1[4,5,6,7]
6467 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6468 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm1
6469 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6470 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6471 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm0
6472 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6473 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm1
6474 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6475 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm1[0],xmm0[0]
6476 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm12[4,5,6,7]
6477 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6478 ; AVX2-FCP-NEXT: vbroadcastsd 928(%rdi), %ymm12
6479 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6480 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm4[0],ymm12[0],ymm4[2],ymm12[2]
6481 ; AVX2-FCP-NEXT: vmovaps 832(%rdi), %xmm12
6482 ; AVX2-FCP-NEXT: vmovaps 784(%rdi), %xmm13
6483 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm13[0],xmm12[0]
6484 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm14[4,5,6,7]
6485 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6486 ; AVX2-FCP-NEXT: vbroadcastsd 1312(%rdi), %ymm14
6487 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6488 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm14[0],ymm1[2],ymm14[2]
6489 ; AVX2-FCP-NEXT: vmovaps 1216(%rdi), %xmm15
6490 ; AVX2-FCP-NEXT: vmovaps 1168(%rdi), %xmm0
6491 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm2 = xmm0[0],xmm15[0]
6492 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
6493 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6494 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm3[1]
6495 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm5
6496 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6497 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6498 ; AVX2-FCP-NEXT: vmovaps %ymm5, %ymm14
6499 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6500 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6501 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6502 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6503 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
6504 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm5
6505 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm3 # 32-byte Reload
6506 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6507 ; AVX2-FCP-NEXT: vmovaps %ymm5, %ymm7
6508 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6509 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6510 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6511 ; AVX2-FCP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
6512 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm9[1],xmm8[1]
6513 ; AVX2-FCP-NEXT: vmovaps 1120(%rdi), %ymm5
6514 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6515 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6516 ; AVX2-FCP-NEXT: vmovaps %ymm5, %ymm6
6517 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6518 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6519 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6520 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6521 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm10[1]
6522 ; AVX2-FCP-NEXT: vmovaps 1504(%rdi), %ymm5
6523 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6524 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6525 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
6526 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,2,1]
6527 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6528 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6529 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
6530 ; AVX2-FCP-NEXT: vmovaps 1312(%rdi), %ymm3
6531 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
6532 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6533 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6534 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6535 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6536 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
6537 ; AVX2-FCP-NEXT: vmovaps 928(%rdi), %ymm1
6538 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
6539 ; AVX2-FCP-NEXT: vmovaps %ymm1, %ymm4
6540 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6541 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,1,2,1]
6542 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6543 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6544 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6545 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6546 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6547 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm2
6548 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6549 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
6550 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6551 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6552 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6553 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6554 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6555 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6556 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
6557 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm9
6558 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6559 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm9[1],ymm1[3],ymm9[3]
6560 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
6561 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6562 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6563 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6564 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
6565 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6566 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm5
6567 ; AVX2-FCP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6568 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm1
6569 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6570 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm1[0]
6571 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6572 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6573 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6574 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
6575 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6576 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm5
6577 ; AVX2-FCP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6578 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
6579 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6580 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
6581 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6582 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6583 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6584 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
6585 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6586 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm14
6587 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm13
6588 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
6589 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6590 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6591 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6592 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
6593 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6594 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm12
6595 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm11
6596 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
6597 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6598 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6599 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6600 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6601 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6602 ; AVX2-FCP-NEXT: vmovaps 848(%rdi), %xmm10
6603 ; AVX2-FCP-NEXT: vmovaps 800(%rdi), %xmm7
6604 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm10[0]
6605 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6606 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6607 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6608 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
6609 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6610 ; AVX2-FCP-NEXT: vmovaps 1040(%rdi), %xmm8
6611 ; AVX2-FCP-NEXT: vmovaps 992(%rdi), %xmm5
6612 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
6613 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6614 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6615 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6616 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
6617 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6618 ; AVX2-FCP-NEXT: vmovaps 1232(%rdi), %xmm6
6619 ; AVX2-FCP-NEXT: vmovaps 1184(%rdi), %xmm3
6620 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
6621 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6622 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6623 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6624 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6625 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
6626 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
6627 ; AVX2-FCP-NEXT: vmovaps 1424(%rdi), %xmm2
6628 ; AVX2-FCP-NEXT: vmovaps 1376(%rdi), %xmm1
6629 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
6630 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
6631 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6632 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
6633 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm9[1],ymm0[3],ymm9[3]
6634 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6635 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6636 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
6637 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6638 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6639 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm0
6640 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6641 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6642 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6643 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
6644 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
6645 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6646 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
6647 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6648 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6649 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm13[1],xmm14[1]
6650 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6651 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm0
6652 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6653 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6654 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm11[1],xmm12[1]
6655 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
6656 ; AVX2-FCP-NEXT: vbroadcastsd 904(%rdi), %ymm0
6657 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6658 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6659 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm10[1]
6660 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
6661 ; AVX2-FCP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
6662 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6663 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6664 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
6665 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
6666 ; AVX2-FCP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
6667 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6668 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6669 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
6670 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
6671 ; AVX2-FCP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
6672 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm4[1],ymm0[3],ymm4[3]
6673 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
6674 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6675 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6676 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rsi)
6677 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6678 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rsi)
6679 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6680 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rsi)
6681 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6682 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rsi)
6683 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6684 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rsi)
6685 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6686 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rsi)
6687 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6688 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rsi)
6689 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6690 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rsi)
6691 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6692 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rdx)
6693 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6694 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rdx)
6695 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6696 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rdx)
6697 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6698 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rdx)
6699 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6700 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rdx)
6701 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6702 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rdx)
6703 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6704 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rdx)
6705 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6706 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rdx)
6707 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6708 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rcx)
6709 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6710 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rcx)
6711 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6712 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rcx)
6713 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6714 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rcx)
6715 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6716 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rcx)
6717 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6718 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rcx)
6719 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6720 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rcx)
6721 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6722 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rcx)
6723 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6724 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r8)
6725 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6726 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r8)
6727 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6728 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r8)
6729 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6730 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r8)
6731 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6732 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r8)
6733 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6734 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r8)
6735 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
6736 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r8)
6737 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6738 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r8)
6739 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6740 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r9)
6741 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6742 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r9)
6743 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6744 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r9)
6745 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6746 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r9)
6747 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6748 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r9)
6749 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6750 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r9)
6751 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6752 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
6753 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6754 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
6755 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6756 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
6757 ; AVX2-FCP-NEXT: vmovaps %ymm3, 192(%rax)
6758 ; AVX2-FCP-NEXT: vmovaps %ymm5, 160(%rax)
6759 ; AVX2-FCP-NEXT: vmovaps %ymm7, 128(%rax)
6760 ; AVX2-FCP-NEXT: vmovaps %ymm9, 96(%rax)
6761 ; AVX2-FCP-NEXT: vmovaps %ymm13, 64(%rax)
6762 ; AVX2-FCP-NEXT: vmovaps %ymm15, 32(%rax)
6763 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6764 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
6765 ; AVX2-FCP-NEXT: addq $1496, %rsp # imm = 0x5D8
6766 ; AVX2-FCP-NEXT: vzeroupper
6767 ; AVX2-FCP-NEXT: retq
6769 ; AVX512-LABEL: load_i64_stride6_vf32:
6771 ; AVX512-NEXT: subq $2632, %rsp # imm = 0xA48
6772 ; AVX512-NEXT: vmovdqa64 1280(%rdi), %zmm2
6773 ; AVX512-NEXT: vmovdqa64 1344(%rdi), %zmm21
6774 ; AVX512-NEXT: vmovdqa64 896(%rdi), %zmm1
6775 ; AVX512-NEXT: vmovdqa64 960(%rdi), %zmm19
6776 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm18
6777 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm3
6778 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm4
6779 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm0
6780 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm5
6781 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm25
6782 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
6783 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
6784 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm7
6785 ; AVX512-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
6786 ; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6787 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
6788 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
6789 ; AVX512-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
6790 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6791 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm8
6792 ; AVX512-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
6793 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6794 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm8
6795 ; AVX512-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
6796 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6797 ; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
6798 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6799 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
6800 ; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
6801 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm8
6802 ; AVX512-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
6803 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6804 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6805 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm6
6806 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
6807 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
6808 ; AVX512-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
6809 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6810 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm1
6811 ; AVX512-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
6812 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6813 ; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
6814 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6815 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
6816 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6817 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6818 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
6819 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6820 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm8
6821 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
6822 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6823 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm8
6824 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
6825 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6826 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
6827 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6828 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
6829 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6830 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm8
6831 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
6832 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6833 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm8
6834 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
6835 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6836 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm8
6837 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
6838 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6839 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
6840 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6841 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
6842 ; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
6843 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm1
6844 ; AVX512-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
6845 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6846 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
6847 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6848 ; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
6849 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6850 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
6851 ; AVX512-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
6852 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6853 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
6854 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6855 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm0
6856 ; AVX512-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
6857 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6858 ; AVX512-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
6859 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6860 ; AVX512-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
6861 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6862 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm0
6863 ; AVX512-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
6864 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6865 ; AVX512-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
6866 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6867 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
6868 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0
6869 ; AVX512-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
6870 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6871 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
6872 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0
6873 ; AVX512-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
6874 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6875 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
6876 ; AVX512-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6877 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm0
6878 ; AVX512-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
6879 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6880 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
6881 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6882 ; AVX512-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
6883 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6884 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm25
6885 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
6886 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm29
6887 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm2
6888 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
6889 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm3
6890 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
6891 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6892 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
6893 ; AVX512-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
6894 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6895 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
6896 ; AVX512-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
6897 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6898 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm3
6899 ; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
6900 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6901 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
6902 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm9
6903 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm27
6904 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm1
6905 ; AVX512-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
6906 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6907 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm30
6908 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm26
6909 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm1
6910 ; AVX512-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
6911 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6912 ; AVX512-NEXT: vmovdqa64 1088(%rdi), %zmm7
6913 ; AVX512-NEXT: vmovdqa64 1024(%rdi), %zmm20
6914 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm1
6915 ; AVX512-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
6916 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6917 ; AVX512-NEXT: vmovdqa64 832(%rdi), %zmm1
6918 ; AVX512-NEXT: vmovdqa64 768(%rdi), %zmm8
6919 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm15
6920 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
6921 ; AVX512-NEXT: vmovdqa64 1472(%rdi), %zmm6
6922 ; AVX512-NEXT: vmovdqa64 1408(%rdi), %zmm21
6923 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm3
6924 ; AVX512-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
6925 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6926 ; AVX512-NEXT: vmovdqa64 1216(%rdi), %zmm19
6927 ; AVX512-NEXT: vmovdqa64 1152(%rdi), %zmm4
6928 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
6929 ; AVX512-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
6930 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm12
6931 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
6932 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
6933 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6934 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm13
6935 ; AVX512-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
6936 ; AVX512-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
6937 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm14
6938 ; AVX512-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
6939 ; AVX512-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
6940 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm2
6941 ; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
6942 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6943 ; AVX512-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
6944 ; AVX512-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
6945 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
6946 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm19
6947 ; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
6948 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm22
6949 ; AVX512-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
6950 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm17
6951 ; AVX512-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
6952 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm18
6953 ; AVX512-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
6954 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
6955 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6956 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm24
6957 ; AVX512-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
6958 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
6959 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
6960 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm0
6961 ; AVX512-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
6962 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6963 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
6964 ; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
6965 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm0
6966 ; AVX512-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
6967 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6968 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
6969 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6970 ; AVX512-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
6971 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm26
6972 ; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
6973 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm23
6974 ; AVX512-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
6975 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm28
6976 ; AVX512-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
6977 ; AVX512-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
6978 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm27
6979 ; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
6980 ; AVX512-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
6981 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm10
6982 ; AVX512-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
6983 ; AVX512-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
6984 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm16
6985 ; AVX512-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
6986 ; AVX512-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
6987 ; AVX512-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
6988 ; AVX512-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
6989 ; AVX512-NEXT: movb $56, %al
6990 ; AVX512-NEXT: kmovw %eax, %k1
6991 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6992 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
6993 ; AVX512-NEXT: movb $-64, %al
6994 ; AVX512-NEXT: kmovw %eax, %k2
6995 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6996 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
6997 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6998 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
6999 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7000 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7001 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7002 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7003 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7004 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
7005 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7006 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
7007 ; AVX512-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7008 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7009 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7010 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7011 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7012 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm20
7013 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7014 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7015 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7016 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7017 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7018 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7019 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7020 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7021 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7022 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7023 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7024 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7025 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7026 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7027 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7028 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm17
7029 ; AVX512-NEXT: movb $24, %al
7030 ; AVX512-NEXT: kmovw %eax, %k2
7031 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7032 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7033 ; AVX512-NEXT: movb $-32, %al
7034 ; AVX512-NEXT: kmovw %eax, %k1
7035 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7036 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7037 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7038 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7039 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7040 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm22
7041 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7042 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7043 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7044 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7045 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm24
7046 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7047 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7048 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7049 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7050 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7051 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7052 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7053 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7054 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7055 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7056 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7057 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm18
7058 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7059 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7060 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7061 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7062 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm23
7063 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7064 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7065 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7066 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7067 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7068 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7069 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7070 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7071 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7072 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7073 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7074 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7075 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7076 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7077 ; AVX512-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7078 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7079 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7080 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7081 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7082 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7083 ; AVX512-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7084 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7085 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7086 ; AVX512-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7087 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7088 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7089 ; AVX512-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7090 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7091 ; AVX512-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7092 ; AVX512-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7093 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7094 ; AVX512-NEXT: vmovaps %zmm7, 64(%rsi)
7095 ; AVX512-NEXT: vmovdqa64 %zmm29, (%rsi)
7096 ; AVX512-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7097 ; AVX512-NEXT: vmovdqa64 %zmm19, (%rdx)
7098 ; AVX512-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7099 ; AVX512-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7100 ; AVX512-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7101 ; AVX512-NEXT: vmovdqa64 %zmm24, (%rcx)
7102 ; AVX512-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7103 ; AVX512-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7104 ; AVX512-NEXT: vmovdqa64 %zmm31, 192(%r8)
7105 ; AVX512-NEXT: vmovdqa64 %zmm23, (%r8)
7106 ; AVX512-NEXT: vmovdqa64 %zmm18, 64(%r8)
7107 ; AVX512-NEXT: vmovdqa64 %zmm14, 128(%r8)
7108 ; AVX512-NEXT: vmovdqa64 %zmm5, 192(%r9)
7109 ; AVX512-NEXT: vmovdqa64 %zmm2, (%r9)
7110 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%r9)
7111 ; AVX512-NEXT: vmovdqa64 %zmm0, 128(%r9)
7112 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
7113 ; AVX512-NEXT: vmovdqa64 %zmm6, 128(%rax)
7114 ; AVX512-NEXT: vmovdqa64 %zmm4, 192(%rax)
7115 ; AVX512-NEXT: vmovdqa64 %zmm10, (%rax)
7116 ; AVX512-NEXT: vmovdqa64 %zmm3, 64(%rax)
7117 ; AVX512-NEXT: addq $2632, %rsp # imm = 0xA48
7118 ; AVX512-NEXT: vzeroupper
7121 ; AVX512-FCP-LABEL: load_i64_stride6_vf32:
7122 ; AVX512-FCP: # %bb.0:
7123 ; AVX512-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
7124 ; AVX512-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
7125 ; AVX512-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
7126 ; AVX512-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
7127 ; AVX512-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
7128 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
7129 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
7130 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
7131 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
7132 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
7133 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
7134 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7135 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7136 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
7137 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7138 ; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7139 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7140 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7141 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7142 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7143 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
7144 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7145 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7146 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7147 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7148 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7149 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7150 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7151 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7152 ; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7153 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7154 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7155 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7156 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7157 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
7158 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7159 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
7160 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7161 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7162 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
7163 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7164 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7165 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7166 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7167 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7168 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7169 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7170 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7171 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7172 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7173 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7174 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7175 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7176 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7177 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7178 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7179 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7180 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7181 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7182 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7183 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7184 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7185 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7186 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7187 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7188 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7189 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7190 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7191 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7192 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7193 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7194 ; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7195 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
7196 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7197 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7198 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7199 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7200 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7201 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7202 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7203 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7204 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7205 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7206 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7207 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
7208 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7209 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7210 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7211 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7212 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7213 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7214 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7215 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7216 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7217 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7218 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7219 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7220 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7221 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7222 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7223 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7224 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7225 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7226 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7227 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7228 ; AVX512-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7229 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7230 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7231 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7232 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7233 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7234 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7235 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7236 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
7237 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
7238 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
7239 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
7240 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7241 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7242 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7243 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7244 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7245 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7246 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7247 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7248 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7249 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7250 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7251 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7252 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7253 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7254 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
7255 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
7256 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
7257 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7258 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7259 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
7260 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
7261 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
7262 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7263 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7264 ; AVX512-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
7265 ; AVX512-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
7266 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
7267 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7268 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7269 ; AVX512-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
7270 ; AVX512-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
7271 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
7272 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7273 ; AVX512-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
7274 ; AVX512-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
7275 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
7276 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7277 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7278 ; AVX512-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
7279 ; AVX512-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
7280 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7281 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7282 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
7283 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7284 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7285 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7286 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
7287 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7288 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7289 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
7290 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7291 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7292 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
7293 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7294 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7295 ; AVX512-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
7296 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
7297 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
7298 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
7299 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
7300 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
7301 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
7302 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
7303 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
7304 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
7305 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
7306 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
7307 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7308 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
7309 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
7310 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
7311 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7312 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
7313 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
7314 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7315 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
7316 ; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7317 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
7318 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
7319 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7320 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
7321 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
7322 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
7323 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
7324 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
7325 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
7326 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
7327 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
7328 ; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
7329 ; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
7330 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
7331 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
7332 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
7333 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
7334 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
7335 ; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
7336 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
7337 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
7338 ; AVX512-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
7339 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
7340 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
7341 ; AVX512-FCP-NEXT: movb $56, %al
7342 ; AVX512-FCP-NEXT: kmovw %eax, %k1
7343 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7344 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
7345 ; AVX512-FCP-NEXT: movb $-64, %al
7346 ; AVX512-FCP-NEXT: kmovw %eax, %k2
7347 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7348 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7349 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7350 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7351 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7352 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7353 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7354 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7355 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7356 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
7357 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7358 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
7359 ; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7360 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7361 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7362 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7363 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7364 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
7365 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7366 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7367 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7368 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7369 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7370 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7371 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7372 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7373 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7374 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7375 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7376 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7377 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7378 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7379 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7380 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
7381 ; AVX512-FCP-NEXT: movb $24, %al
7382 ; AVX512-FCP-NEXT: kmovw %eax, %k2
7383 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7384 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7385 ; AVX512-FCP-NEXT: movb $-32, %al
7386 ; AVX512-FCP-NEXT: kmovw %eax, %k1
7387 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7388 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7389 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7390 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7391 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7392 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
7393 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7394 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7395 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7396 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7397 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
7398 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7399 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7400 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7401 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7402 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7403 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7404 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7405 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7406 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7407 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7408 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
7410 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7411 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7412 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7413 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7414 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
7415 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7416 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7417 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7418 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7419 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7420 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7421 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7422 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7423 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7424 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7425 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7426 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7427 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7428 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7429 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7430 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7431 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7432 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7433 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7434 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7435 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7436 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7437 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7438 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7439 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7440 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7441 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7442 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7443 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7444 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7445 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7446 ; AVX512-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
7447 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
7448 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7449 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
7450 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7451 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7452 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7453 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
7454 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7455 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7456 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
7457 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
7458 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
7459 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
7460 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
7461 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
7462 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
7463 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
7464 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
7465 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
7466 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
7467 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
7468 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
7469 ; AVX512-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
7470 ; AVX512-FCP-NEXT: vzeroupper
7471 ; AVX512-FCP-NEXT: retq
7473 ; AVX512DQ-LABEL: load_i64_stride6_vf32:
7474 ; AVX512DQ: # %bb.0:
7475 ; AVX512DQ-NEXT: subq $2632, %rsp # imm = 0xA48
7476 ; AVX512DQ-NEXT: vmovdqa64 1280(%rdi), %zmm2
7477 ; AVX512DQ-NEXT: vmovdqa64 1344(%rdi), %zmm21
7478 ; AVX512DQ-NEXT: vmovdqa64 896(%rdi), %zmm1
7479 ; AVX512DQ-NEXT: vmovdqa64 960(%rdi), %zmm19
7480 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm18
7481 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm3
7482 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm4
7483 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm0
7484 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm5
7485 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm25
7486 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7487 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7488 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm7
7489 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7490 ; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7491 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7492 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
7493 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7494 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7495 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm8
7496 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7497 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7498 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm8
7499 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7500 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7501 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7502 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7503 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7504 ; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7505 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm8
7506 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7507 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7508 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7509 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm6
7510 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7511 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
7512 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7513 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7514 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm1
7515 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7516 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7517 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7518 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7519 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7520 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7521 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7522 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7523 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7524 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm8
7525 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7526 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7527 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm8
7528 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7529 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7530 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7531 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7532 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7533 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7534 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm8
7535 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7536 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7537 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm8
7538 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7539 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7540 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm8
7541 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7542 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7543 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7544 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7545 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7546 ; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7547 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm1
7548 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7549 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7550 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7551 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7552 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7553 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7554 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
7555 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7556 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7557 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7558 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7559 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm0
7560 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7561 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7562 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7563 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7564 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7565 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7566 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm0
7567 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7568 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7569 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7570 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7571 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7572 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0
7573 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7574 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7575 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7576 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0
7577 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7578 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7579 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7580 ; AVX512DQ-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7581 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm0
7582 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7583 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7584 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7585 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7586 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7587 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7588 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm25
7589 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
7590 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm29
7591 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm2
7592 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7593 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm3
7594 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7595 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7596 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
7597 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7598 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7599 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
7600 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7601 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7602 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm3
7603 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7604 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7605 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7606 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm9
7607 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm27
7608 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm1
7609 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7610 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7611 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm30
7612 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm26
7613 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm1
7614 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7615 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7616 ; AVX512DQ-NEXT: vmovdqa64 1088(%rdi), %zmm7
7617 ; AVX512DQ-NEXT: vmovdqa64 1024(%rdi), %zmm20
7618 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm1
7619 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7620 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7621 ; AVX512DQ-NEXT: vmovdqa64 832(%rdi), %zmm1
7622 ; AVX512DQ-NEXT: vmovdqa64 768(%rdi), %zmm8
7623 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm15
7624 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7625 ; AVX512DQ-NEXT: vmovdqa64 1472(%rdi), %zmm6
7626 ; AVX512DQ-NEXT: vmovdqa64 1408(%rdi), %zmm21
7627 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm3
7628 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7629 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7630 ; AVX512DQ-NEXT: vmovdqa64 1216(%rdi), %zmm19
7631 ; AVX512DQ-NEXT: vmovdqa64 1152(%rdi), %zmm4
7632 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7633 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7634 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm12
7635 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7636 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7637 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7638 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm13
7639 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7640 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7641 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm14
7642 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7643 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7644 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm2
7645 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7646 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7647 ; AVX512DQ-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
7648 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
7649 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
7650 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm19
7651 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
7652 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm22
7653 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
7654 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm17
7655 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
7656 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm18
7657 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
7658 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
7659 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7660 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm24
7661 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
7662 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
7663 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7664 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm0
7665 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
7666 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7667 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
7668 ; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7669 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm0
7670 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
7671 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7672 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
7673 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
7674 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
7675 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm26
7676 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
7677 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm23
7678 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
7679 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm28
7680 ; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
7681 ; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
7682 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm27
7683 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
7684 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
7685 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm10
7686 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
7687 ; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
7688 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm16
7689 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
7690 ; AVX512DQ-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
7691 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
7692 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
7693 ; AVX512DQ-NEXT: movb $56, %al
7694 ; AVX512DQ-NEXT: kmovw %eax, %k1
7695 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7696 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
7697 ; AVX512DQ-NEXT: movb $-64, %al
7698 ; AVX512DQ-NEXT: kmovw %eax, %k2
7699 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7700 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7701 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7702 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7703 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7704 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7705 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7706 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7707 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7708 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
7709 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7710 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
7711 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7712 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7713 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
7714 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
7715 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
7716 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm20
7717 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7718 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7719 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
7720 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7721 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
7722 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
7723 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
7724 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7725 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7726 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7727 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
7728 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7729 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
7730 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
7731 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
7732 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm17
7733 ; AVX512DQ-NEXT: movb $24, %al
7734 ; AVX512DQ-NEXT: kmovw %eax, %k2
7735 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7736 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
7737 ; AVX512DQ-NEXT: movb $-32, %al
7738 ; AVX512DQ-NEXT: kmovw %eax, %k1
7739 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
7740 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7741 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7742 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7743 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
7744 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm22
7745 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7746 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7747 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7748 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
7749 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm24
7750 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7751 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7752 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
7753 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7754 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7755 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7756 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7757 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7758 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7759 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7760 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7761 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm18
7762 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7763 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7764 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7765 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
7766 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm23
7767 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7768 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7769 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
7770 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7771 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
7772 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
7773 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7774 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
7775 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7776 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
7777 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7778 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
7779 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
7780 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7781 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
7782 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
7783 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7784 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
7785 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
7786 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7787 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
7788 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7789 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7790 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
7791 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
7792 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7793 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
7794 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
7795 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 192(%rsi)
7796 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 128(%rsi)
7797 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7798 ; AVX512DQ-NEXT: vmovaps %zmm7, 64(%rsi)
7799 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, (%rsi)
7800 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 192(%rdx)
7801 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, (%rdx)
7802 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 64(%rdx)
7803 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 128(%rdx)
7804 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 192(%rcx)
7805 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, (%rcx)
7806 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 64(%rcx)
7807 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 128(%rcx)
7808 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 192(%r8)
7809 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, (%r8)
7810 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 64(%r8)
7811 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 128(%r8)
7812 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 192(%r9)
7813 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, (%r9)
7814 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%r9)
7815 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%r9)
7816 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
7817 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rax)
7818 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 192(%rax)
7819 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, (%rax)
7820 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 64(%rax)
7821 ; AVX512DQ-NEXT: addq $2632, %rsp # imm = 0xA48
7822 ; AVX512DQ-NEXT: vzeroupper
7823 ; AVX512DQ-NEXT: retq
7825 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf32:
7826 ; AVX512DQ-FCP: # %bb.0:
7827 ; AVX512DQ-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
7828 ; AVX512DQ-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
7829 ; AVX512DQ-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
7830 ; AVX512DQ-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
7831 ; AVX512DQ-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
7832 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
7833 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
7834 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
7835 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
7836 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
7837 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
7838 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
7839 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7840 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
7841 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
7842 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7843 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
7844 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7845 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
7846 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7847 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
7848 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
7849 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7850 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7851 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7852 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7853 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
7854 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7855 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
7856 ; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7857 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
7858 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
7859 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7860 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7861 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
7862 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
7863 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
7864 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
7865 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7866 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
7867 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
7868 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7869 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
7870 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7871 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
7872 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7873 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7874 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7875 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7876 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7877 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7878 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7879 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7880 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7881 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7882 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7883 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7884 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
7885 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7886 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
7887 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
7888 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7889 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
7890 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7891 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7892 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
7893 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
7894 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7895 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
7896 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7897 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
7898 ; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7899 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
7900 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
7901 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7902 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
7903 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7904 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
7905 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7906 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7907 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
7908 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7909 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7910 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7911 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
7912 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
7913 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7914 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
7915 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7916 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
7917 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7918 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7919 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
7920 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7921 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
7922 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7923 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
7924 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7925 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
7926 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7927 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
7928 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
7929 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
7930 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7931 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
7932 ; AVX512DQ-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7933 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
7934 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
7935 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7936 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
7937 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7938 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
7939 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7940 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
7941 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
7942 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
7943 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
7944 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
7945 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7946 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
7947 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7948 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7949 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
7950 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7951 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
7952 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
7953 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7954 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
7955 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
7956 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7957 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
7958 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
7959 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
7960 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
7961 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
7962 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7963 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
7964 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
7965 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
7966 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
7967 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7968 ; AVX512DQ-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
7969 ; AVX512DQ-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
7970 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
7971 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
7972 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7973 ; AVX512DQ-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
7974 ; AVX512DQ-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
7975 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
7976 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
7977 ; AVX512DQ-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
7978 ; AVX512DQ-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
7979 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
7980 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
7981 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7982 ; AVX512DQ-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
7983 ; AVX512DQ-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
7984 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
7985 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7986 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
7987 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
7988 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
7989 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7990 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
7991 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
7992 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
7993 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
7994 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
7995 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
7996 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
7997 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
7998 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7999 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
8000 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
8001 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
8002 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
8003 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
8004 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
8005 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
8006 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
8007 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
8008 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
8009 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
8010 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
8011 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8012 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
8013 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
8014 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8015 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8016 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
8017 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8018 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8019 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8020 ; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8021 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
8022 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8023 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8024 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8025 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8026 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8027 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
8028 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8029 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
8030 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8031 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
8032 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8033 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8034 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
8035 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8036 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8037 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
8038 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8039 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8040 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
8041 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8042 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8043 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8044 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8045 ; AVX512DQ-FCP-NEXT: movb $56, %al
8046 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
8047 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8048 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8049 ; AVX512DQ-FCP-NEXT: movb $-64, %al
8050 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
8051 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8052 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8053 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8054 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8055 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8056 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8057 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8058 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8059 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8060 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8061 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8062 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8063 ; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8064 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8065 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8066 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8067 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8068 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
8069 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8070 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8071 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8072 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8073 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8074 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8075 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8076 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8077 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8078 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8079 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8080 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8081 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8082 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8083 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8084 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
8085 ; AVX512DQ-FCP-NEXT: movb $24, %al
8086 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
8087 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8088 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8089 ; AVX512DQ-FCP-NEXT: movb $-32, %al
8090 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
8091 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8092 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8093 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8094 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8095 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8096 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
8097 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8098 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8099 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8100 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8101 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
8102 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8103 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8104 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8105 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8106 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8107 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8108 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8109 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8110 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8111 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8112 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8113 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
8114 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8115 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8116 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8117 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8118 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
8119 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8120 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8121 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8122 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8123 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8124 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8125 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8126 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8127 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8128 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8129 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8130 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8131 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8132 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8133 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8134 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8135 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8136 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8137 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8138 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8139 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8140 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8141 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8142 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8143 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8144 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8145 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8146 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8147 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8148 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8149 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8150 ; AVX512DQ-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
8151 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
8152 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8153 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
8154 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8155 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8156 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8157 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
8158 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8159 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8160 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
8161 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
8162 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
8163 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
8164 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
8165 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
8166 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
8167 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
8168 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
8169 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
8170 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
8171 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
8172 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
8173 ; AVX512DQ-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
8174 ; AVX512DQ-FCP-NEXT: vzeroupper
8175 ; AVX512DQ-FCP-NEXT: retq
8177 ; AVX512BW-LABEL: load_i64_stride6_vf32:
8178 ; AVX512BW: # %bb.0:
8179 ; AVX512BW-NEXT: subq $2632, %rsp # imm = 0xA48
8180 ; AVX512BW-NEXT: vmovdqa64 1280(%rdi), %zmm2
8181 ; AVX512BW-NEXT: vmovdqa64 1344(%rdi), %zmm21
8182 ; AVX512BW-NEXT: vmovdqa64 896(%rdi), %zmm1
8183 ; AVX512BW-NEXT: vmovdqa64 960(%rdi), %zmm19
8184 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm18
8185 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm3
8186 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm4
8187 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm0
8188 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm5
8189 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm25
8190 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8191 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8192 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm7
8193 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8194 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8195 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8196 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
8197 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8198 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8199 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm8
8200 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8201 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8202 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm8
8203 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8204 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8205 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8206 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8207 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8208 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8209 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm8
8210 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8211 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8212 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8213 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm6
8214 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8215 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
8216 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8217 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8218 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm1
8219 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8220 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8221 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8222 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8223 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8224 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8225 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8226 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8227 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8228 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
8229 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8230 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8231 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm8
8232 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8233 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8234 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8235 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8236 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8237 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8238 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm8
8239 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8240 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8241 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
8242 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8243 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8244 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm8
8245 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8246 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8247 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8248 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8249 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8250 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8251 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
8252 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8253 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8254 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8255 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8256 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8257 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8258 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
8259 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8260 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8261 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8262 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8263 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm0
8264 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8265 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8266 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8267 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8268 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8269 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8270 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm0
8271 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8272 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8273 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8274 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8275 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8276 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0
8277 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8278 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8279 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8280 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0
8281 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8282 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8283 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8284 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8285 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm0
8286 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8287 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8288 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8289 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8290 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8291 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8292 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm25
8293 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
8294 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm29
8295 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2
8296 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
8297 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm3
8298 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
8299 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8300 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
8301 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
8302 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8303 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
8304 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
8305 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8306 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm3
8307 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
8308 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8309 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
8310 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm9
8311 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm27
8312 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm1
8313 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
8314 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8315 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm30
8316 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm26
8317 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm1
8318 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
8319 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8320 ; AVX512BW-NEXT: vmovdqa64 1088(%rdi), %zmm7
8321 ; AVX512BW-NEXT: vmovdqa64 1024(%rdi), %zmm20
8322 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm1
8323 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
8324 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8325 ; AVX512BW-NEXT: vmovdqa64 832(%rdi), %zmm1
8326 ; AVX512BW-NEXT: vmovdqa64 768(%rdi), %zmm8
8327 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm15
8328 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
8329 ; AVX512BW-NEXT: vmovdqa64 1472(%rdi), %zmm6
8330 ; AVX512BW-NEXT: vmovdqa64 1408(%rdi), %zmm21
8331 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm3
8332 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
8333 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8334 ; AVX512BW-NEXT: vmovdqa64 1216(%rdi), %zmm19
8335 ; AVX512BW-NEXT: vmovdqa64 1152(%rdi), %zmm4
8336 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
8337 ; AVX512BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
8338 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm12
8339 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
8340 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
8341 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8342 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
8343 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
8344 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
8345 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm14
8346 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
8347 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
8348 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm2
8349 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
8350 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8351 ; AVX512BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
8352 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
8353 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
8354 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm19
8355 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
8356 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm22
8357 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
8358 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm17
8359 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
8360 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm18
8361 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
8362 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
8363 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8364 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm24
8365 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
8366 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8367 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8368 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm0
8369 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8370 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8371 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8372 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8373 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
8374 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8375 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8376 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8377 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8378 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8379 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm26
8380 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8381 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm23
8382 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8383 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm28
8384 ; AVX512BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8385 ; AVX512BW-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8386 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm27
8387 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8388 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8389 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm10
8390 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8391 ; AVX512BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8392 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm16
8393 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8394 ; AVX512BW-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8395 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8396 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8397 ; AVX512BW-NEXT: movb $56, %al
8398 ; AVX512BW-NEXT: kmovd %eax, %k1
8399 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8400 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8401 ; AVX512BW-NEXT: movb $-64, %al
8402 ; AVX512BW-NEXT: kmovd %eax, %k2
8403 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8404 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8405 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8406 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8407 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8408 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8409 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8410 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8411 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8412 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8413 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8414 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8415 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8416 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8417 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8418 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8419 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8420 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm20
8421 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8422 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8423 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8424 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8425 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8426 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8427 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8428 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8429 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8430 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8431 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8432 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8433 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8434 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8435 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8436 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm17
8437 ; AVX512BW-NEXT: movb $24, %al
8438 ; AVX512BW-NEXT: kmovd %eax, %k2
8439 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8440 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8441 ; AVX512BW-NEXT: movb $-32, %al
8442 ; AVX512BW-NEXT: kmovd %eax, %k1
8443 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8444 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8445 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8446 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8447 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8448 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm22
8449 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8450 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8451 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8452 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8453 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm24
8454 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8455 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8456 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8457 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8458 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8459 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8460 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8461 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8462 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8463 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8464 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8465 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm18
8466 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8467 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8468 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8469 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8470 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm23
8471 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8472 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8473 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8474 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8475 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8476 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8477 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8478 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8479 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8480 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8481 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8482 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8483 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8484 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8485 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8486 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8487 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8488 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8489 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8490 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8491 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8492 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8493 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8494 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8495 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8496 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8497 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8498 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8499 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8500 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8501 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8502 ; AVX512BW-NEXT: vmovaps %zmm7, 64(%rsi)
8503 ; AVX512BW-NEXT: vmovdqa64 %zmm29, (%rsi)
8504 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8505 ; AVX512BW-NEXT: vmovdqa64 %zmm19, (%rdx)
8506 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8507 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8508 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8509 ; AVX512BW-NEXT: vmovdqa64 %zmm24, (%rcx)
8510 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8511 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8512 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 192(%r8)
8513 ; AVX512BW-NEXT: vmovdqa64 %zmm23, (%r8)
8514 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
8515 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 128(%r8)
8516 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 192(%r9)
8517 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%r9)
8518 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
8519 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%r9)
8520 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8521 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
8522 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 192(%rax)
8523 ; AVX512BW-NEXT: vmovdqa64 %zmm10, (%rax)
8524 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 64(%rax)
8525 ; AVX512BW-NEXT: addq $2632, %rsp # imm = 0xA48
8526 ; AVX512BW-NEXT: vzeroupper
8527 ; AVX512BW-NEXT: retq
8529 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf32:
8530 ; AVX512BW-FCP: # %bb.0:
8531 ; AVX512BW-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
8532 ; AVX512BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
8533 ; AVX512BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
8534 ; AVX512BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
8535 ; AVX512BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
8536 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
8537 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
8538 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
8539 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
8540 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
8541 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
8542 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8543 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8544 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
8545 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8546 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8547 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8548 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
8549 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8550 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8551 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
8552 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8553 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8554 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
8555 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8556 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8557 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8558 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8559 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8560 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8561 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
8562 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8563 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8564 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8565 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
8566 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8567 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
8568 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8569 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8570 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
8571 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8572 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8573 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8574 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8575 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8576 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8577 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8578 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8579 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8580 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
8581 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8582 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8583 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
8584 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8585 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8586 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8587 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8588 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8589 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8590 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
8591 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8592 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8593 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
8594 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8595 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8596 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
8597 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8598 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8599 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8600 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8601 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8602 ; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8603 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
8604 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8605 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8606 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8607 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8608 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8609 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8610 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
8611 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8612 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8613 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8614 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8615 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
8616 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8617 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8618 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8619 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8620 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8621 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8622 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
8623 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8624 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8625 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8626 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8627 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8628 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
8629 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8630 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8631 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8632 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
8633 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8634 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8635 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8636 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8637 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
8638 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8639 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8640 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8641 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8642 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8643 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8644 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
8645 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
8646 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
8647 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
8648 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
8649 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
8650 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
8651 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8652 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
8653 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
8654 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8655 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
8656 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
8657 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8658 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
8659 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
8660 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8661 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
8662 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
8663 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
8664 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
8665 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
8666 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8667 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
8668 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
8669 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
8670 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
8671 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8672 ; AVX512BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
8673 ; AVX512BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
8674 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
8675 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
8676 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8677 ; AVX512BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
8678 ; AVX512BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
8679 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
8680 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
8681 ; AVX512BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
8682 ; AVX512BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
8683 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
8684 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
8685 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8686 ; AVX512BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
8687 ; AVX512BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
8688 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
8689 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
8690 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
8691 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
8692 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
8693 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8694 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
8695 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
8696 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
8697 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
8698 ; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
8699 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
8700 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
8701 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
8702 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8703 ; AVX512BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
8704 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
8705 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
8706 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
8707 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
8708 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
8709 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
8710 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
8711 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
8712 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
8713 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
8714 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
8715 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8716 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
8717 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
8718 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
8719 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8720 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
8721 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
8722 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8723 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
8724 ; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8725 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
8726 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
8727 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8728 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
8729 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8730 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
8731 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
8732 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
8733 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
8734 ; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
8735 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
8736 ; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
8737 ; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
8738 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
8739 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
8740 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
8741 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
8742 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
8743 ; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
8744 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
8745 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
8746 ; AVX512BW-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
8747 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
8748 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
8749 ; AVX512BW-FCP-NEXT: movb $56, %al
8750 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
8751 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8752 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
8753 ; AVX512BW-FCP-NEXT: movb $-64, %al
8754 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
8755 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8756 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
8757 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8758 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8759 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8760 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8761 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8762 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8763 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8764 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
8765 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8766 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
8767 ; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
8768 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8769 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
8770 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
8771 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
8772 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
8773 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8774 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8775 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
8776 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8777 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
8778 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
8779 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
8780 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8781 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
8782 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
8783 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
8784 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8785 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
8786 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
8787 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
8788 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
8789 ; AVX512BW-FCP-NEXT: movb $24, %al
8790 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
8791 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8792 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
8793 ; AVX512BW-FCP-NEXT: movb $-32, %al
8794 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
8795 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
8796 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8797 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8798 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8799 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
8800 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
8801 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8802 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
8803 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
8804 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
8805 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
8806 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8807 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
8808 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
8809 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8810 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
8811 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
8812 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8813 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8814 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8815 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8816 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8817 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
8818 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8819 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8820 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
8821 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
8822 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
8823 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8824 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
8825 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
8826 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8827 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
8828 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
8829 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8830 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
8831 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8832 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
8833 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8834 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
8835 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
8836 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8837 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
8838 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
8839 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8840 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
8841 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
8842 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
8843 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
8844 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
8845 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8846 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
8847 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
8848 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8849 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
8850 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
8851 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
8852 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
8853 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8854 ; AVX512BW-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
8855 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
8856 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
8857 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
8858 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
8859 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
8860 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
8861 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
8862 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
8863 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
8864 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
8865 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
8866 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
8867 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
8868 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
8869 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
8870 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
8871 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
8872 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
8873 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
8874 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
8875 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
8876 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
8877 ; AVX512BW-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
8878 ; AVX512BW-FCP-NEXT: vzeroupper
8879 ; AVX512BW-FCP-NEXT: retq
8881 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf32:
8882 ; AVX512DQ-BW: # %bb.0:
8883 ; AVX512DQ-BW-NEXT: subq $2632, %rsp # imm = 0xA48
8884 ; AVX512DQ-BW-NEXT: vmovdqa64 1280(%rdi), %zmm2
8885 ; AVX512DQ-BW-NEXT: vmovdqa64 1344(%rdi), %zmm21
8886 ; AVX512DQ-BW-NEXT: vmovdqa64 896(%rdi), %zmm1
8887 ; AVX512DQ-BW-NEXT: vmovdqa64 960(%rdi), %zmm19
8888 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm18
8889 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm3
8890 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm4
8891 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm0
8892 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm5
8893 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm25
8894 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
8895 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8896 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm7
8897 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
8898 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8899 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
8900 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
8901 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
8902 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8903 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm8
8904 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
8905 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8906 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm8
8907 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8908 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8909 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
8910 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8911 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
8912 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
8913 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm8
8914 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
8915 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8916 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8917 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm6
8918 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
8919 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
8920 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
8921 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8922 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm1
8923 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
8924 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8925 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
8926 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8927 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
8928 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8929 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8930 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8931 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8932 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm8
8933 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8934 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8935 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm8
8936 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8937 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8938 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8939 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8940 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
8941 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8942 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm8
8943 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
8944 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8945 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm8
8946 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8947 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8948 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm8
8949 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
8950 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8951 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
8952 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8953 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
8954 ; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
8955 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm1
8956 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
8957 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8958 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
8959 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8960 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
8961 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8962 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
8963 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
8964 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8965 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
8966 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8967 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm0
8968 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
8969 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8970 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
8971 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8972 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
8973 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8974 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm0
8975 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
8976 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8977 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
8978 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8979 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
8980 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0
8981 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
8982 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8983 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
8984 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0
8985 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
8986 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8987 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
8988 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8989 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm0
8990 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
8991 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8992 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
8993 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8994 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
8995 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8996 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm25
8997 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
8998 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm29
8999 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm2
9000 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
9001 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm3
9002 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
9003 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9004 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
9005 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
9006 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9007 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
9008 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
9009 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9010 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm3
9011 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
9012 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9013 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
9014 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm9
9015 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm27
9016 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm1
9017 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
9018 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9019 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm30
9020 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm26
9021 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm1
9022 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
9023 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9024 ; AVX512DQ-BW-NEXT: vmovdqa64 1088(%rdi), %zmm7
9025 ; AVX512DQ-BW-NEXT: vmovdqa64 1024(%rdi), %zmm20
9026 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm1
9027 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
9028 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9029 ; AVX512DQ-BW-NEXT: vmovdqa64 832(%rdi), %zmm1
9030 ; AVX512DQ-BW-NEXT: vmovdqa64 768(%rdi), %zmm8
9031 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm15
9032 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
9033 ; AVX512DQ-BW-NEXT: vmovdqa64 1472(%rdi), %zmm6
9034 ; AVX512DQ-BW-NEXT: vmovdqa64 1408(%rdi), %zmm21
9035 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm3
9036 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
9037 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9038 ; AVX512DQ-BW-NEXT: vmovdqa64 1216(%rdi), %zmm19
9039 ; AVX512DQ-BW-NEXT: vmovdqa64 1152(%rdi), %zmm4
9040 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
9041 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
9042 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm12
9043 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
9044 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
9045 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9046 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm13
9047 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
9048 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
9049 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm14
9050 ; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
9051 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
9052 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm2
9053 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
9054 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9055 ; AVX512DQ-BW-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
9056 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
9057 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
9058 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm19
9059 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
9060 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm22
9061 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
9062 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm17
9063 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
9064 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm18
9065 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
9066 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
9067 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9068 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm24
9069 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
9070 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
9071 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
9072 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm0
9073 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
9074 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9075 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
9076 ; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
9077 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm0
9078 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
9079 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9080 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
9081 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
9082 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
9083 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm26
9084 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
9085 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm23
9086 ; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
9087 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm28
9088 ; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
9089 ; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
9090 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm27
9091 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
9092 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
9093 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm10
9094 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
9095 ; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
9096 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm16
9097 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
9098 ; AVX512DQ-BW-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
9099 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
9100 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
9101 ; AVX512DQ-BW-NEXT: movb $56, %al
9102 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
9103 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9104 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
9105 ; AVX512DQ-BW-NEXT: movb $-64, %al
9106 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
9107 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9108 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
9109 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9110 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9111 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9112 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9113 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9114 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9115 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9116 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
9117 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9118 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
9119 ; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
9120 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9121 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9122 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9123 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9124 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm20
9125 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9126 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
9127 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
9128 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9129 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
9130 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
9131 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
9132 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9133 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
9134 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
9135 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
9136 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9137 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
9138 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
9139 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
9140 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm17
9141 ; AVX512DQ-BW-NEXT: movb $24, %al
9142 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
9143 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9144 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
9145 ; AVX512DQ-BW-NEXT: movb $-32, %al
9146 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
9147 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
9148 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9149 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9150 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9151 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
9152 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm22
9153 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9154 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9155 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9156 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
9157 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm24
9158 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9159 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
9160 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
9161 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9162 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
9163 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
9164 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9165 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9166 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9167 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9168 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
9169 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm18
9170 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9171 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9172 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9173 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
9174 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm23
9175 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9176 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
9177 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
9178 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9179 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
9180 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
9181 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9182 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
9183 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9184 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
9185 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9186 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
9187 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
9188 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9189 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
9190 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
9191 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9192 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
9193 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
9194 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9195 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
9196 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
9197 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9198 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
9199 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
9200 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9201 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
9202 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
9203 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 192(%rsi)
9204 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 128(%rsi)
9205 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
9206 ; AVX512DQ-BW-NEXT: vmovaps %zmm7, 64(%rsi)
9207 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, (%rsi)
9208 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 192(%rdx)
9209 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, (%rdx)
9210 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, 64(%rdx)
9211 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 128(%rdx)
9212 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 192(%rcx)
9213 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, (%rcx)
9214 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, 64(%rcx)
9215 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 128(%rcx)
9216 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 192(%r8)
9217 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, (%r8)
9218 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
9219 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 128(%r8)
9220 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, 192(%r9)
9221 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%r9)
9222 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
9223 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 128(%r9)
9224 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9225 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
9226 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 192(%rax)
9227 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, (%rax)
9228 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 64(%rax)
9229 ; AVX512DQ-BW-NEXT: addq $2632, %rsp # imm = 0xA48
9230 ; AVX512DQ-BW-NEXT: vzeroupper
9231 ; AVX512DQ-BW-NEXT: retq
9233 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf32:
9234 ; AVX512DQ-BW-FCP: # %bb.0:
9235 ; AVX512DQ-BW-FCP-NEXT: subq $2632, %rsp # imm = 0xA48
9236 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm2
9237 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm21
9238 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm1
9239 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm19
9240 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm18
9241 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm3
9242 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm4
9243 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm0
9244 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm5
9245 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm25
9246 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,6,0,10,0,6,0,10]
9247 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
9248 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm7
9249 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm7
9250 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9251 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm7 = [0,6,12,0]
9252 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
9253 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm8
9254 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9255 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm8
9256 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm7, %zmm8
9257 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9258 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
9259 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
9260 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9261 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm6
9262 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9263 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,7,0,11,1,7,0,11]
9264 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
9265 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm8
9266 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm6, %zmm8
9267 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9268 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9269 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm6
9270 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [1,7,13,0]
9271 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
9272 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
9273 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9274 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1
9275 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
9276 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9277 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm8
9278 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9279 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [10,4,10,4,10,4,10,4]
9280 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9281 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9282 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
9283 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9284 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
9285 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
9286 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9287 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
9288 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
9289 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9290 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
9291 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9292 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,5,11,5,11,5,11,5]
9293 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9294 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm8
9295 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm8
9296 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9297 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm8
9298 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
9299 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9300 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm8
9301 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm8
9302 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9303 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm1
9304 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9305 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [12,0,0,6,12,0,0,6]
9306 ; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
9307 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
9308 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm8, %zmm1
9309 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9310 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [13,0,1,7,13,0,1,7]
9311 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9312 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm4
9313 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9314 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
9315 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
9316 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9317 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
9318 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9319 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0
9320 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm8, %zmm0
9321 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9322 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm2, %zmm8
9323 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9324 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm2
9325 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9326 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
9327 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm10, %zmm0
9328 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9329 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm1, %zmm6
9330 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9331 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [10,0,6,0]
9332 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
9333 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm11, %zmm0
9334 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9335 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm31 = [11,1,7,0]
9336 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0
9337 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm31, %zmm0
9338 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9339 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [4,10,4,10,4,10,4,10]
9340 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9341 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0
9342 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm5, %zmm0
9343 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9344 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
9345 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9346 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm3
9347 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9348 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm25
9349 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
9350 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm29
9351 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm2
9352 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm29
9353 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
9354 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm3
9355 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9356 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
9357 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm11, %zmm3
9358 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9359 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
9360 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm31, %zmm3
9361 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9362 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm3
9363 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm3
9364 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9365 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm25
9366 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm9
9367 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm27
9368 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm1
9369 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm5, %zmm1
9370 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9371 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm30
9372 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm26
9373 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm1
9374 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm1
9375 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9376 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm7
9377 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm20
9378 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm1
9379 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm5, %zmm1
9380 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9381 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
9382 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm8
9383 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm15
9384 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm15
9385 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm6
9386 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm21
9387 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm3
9388 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm5, %zmm3
9389 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9390 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm19
9391 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm4
9392 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm2
9393 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
9394 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm12
9395 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm12
9396 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm10
9397 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9398 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
9399 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm11, %zmm13
9400 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm11
9401 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm14
9402 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm31, %zmm14
9403 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm19, %zmm31
9404 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm2
9405 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm2
9406 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9407 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm19, %zmm4, %zmm5
9408 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm4
9409 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
9410 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm19
9411 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
9412 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm22
9413 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm0, %zmm22
9414 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm17
9415 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm17
9416 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm18
9417 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
9418 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
9419 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
9420 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24
9421 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm1, %zmm24
9422 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
9423 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
9424 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm0
9425 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm0
9426 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9427 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
9428 ; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
9429 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm0
9430 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm3, %zmm0
9431 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9432 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
9433 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
9434 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm30
9435 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm26
9436 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm26
9437 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm23
9438 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm2, %zmm23
9439 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm28
9440 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm28
9441 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm0, %zmm9
9442 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm27
9443 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm27
9444 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm1
9445 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10
9446 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm2, %zmm10
9447 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm21, %zmm2
9448 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm16
9449 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm3, %zmm16
9450 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm21, %zmm6, %zmm3
9451 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
9452 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm7
9453 ; AVX512DQ-BW-FCP-NEXT: movb $56, %al
9454 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
9455 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9456 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
9457 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %al
9458 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
9459 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9460 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
9461 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9462 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9463 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9464 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9465 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9466 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9467 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9468 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k1}
9469 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9470 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
9471 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
9472 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9473 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
9474 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
9475 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm0 {%k2}
9476 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20
9477 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9478 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
9479 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm12 {%k2}
9480 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9481 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
9482 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
9483 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm21 {%k2}
9484 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9485 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
9486 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
9487 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19 {%k2}
9488 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9489 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
9490 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm0 {%k1}
9491 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm0 {%k2}
9492 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
9493 ; AVX512DQ-BW-FCP-NEXT: movb $24, %al
9494 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
9495 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9496 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
9497 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %al
9498 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
9499 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm13 {%k1}
9500 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9501 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9502 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9503 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm18 {%k1}
9504 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm22
9505 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9506 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
9507 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
9508 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm18 {%k1}
9509 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm24
9510 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9511 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
9512 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
9513 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9514 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
9515 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
9516 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9517 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9518 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9519 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9520 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
9521 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm18
9522 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9523 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9524 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
9525 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1 {%k1}
9526 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm23
9527 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9528 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
9529 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k1}
9530 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9531 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
9532 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
9533 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9534 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
9535 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9536 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
9537 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9538 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
9539 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm2 {%k1}
9540 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9541 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm5, %zmm10, %zmm5
9542 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5 {%k1}
9543 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9544 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
9545 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
9546 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
9547 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm25, %zmm10, %zmm10
9548 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
9549 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9550 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm9, %zmm4
9551 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
9552 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9553 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm8, %zmm6, %zmm6
9554 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
9555 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 192(%rsi)
9556 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 128(%rsi)
9557 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
9558 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm7, 64(%rsi)
9559 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, (%rsi)
9560 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%rdx)
9561 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, (%rdx)
9562 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, 64(%rdx)
9563 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 128(%rdx)
9564 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 192(%rcx)
9565 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, (%rcx)
9566 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, 64(%rcx)
9567 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rcx)
9568 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 192(%r8)
9569 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, (%r8)
9570 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
9571 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 128(%r8)
9572 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 192(%r9)
9573 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
9574 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
9575 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r9)
9576 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
9577 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
9578 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 192(%rax)
9579 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
9580 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%rax)
9581 ; AVX512DQ-BW-FCP-NEXT: addq $2632, %rsp # imm = 0xA48
9582 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
9583 ; AVX512DQ-BW-FCP-NEXT: retq
9584 %wide.vec = load <192 x i64>, ptr %in.vec, align 64
9585 %strided.vec0 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90, i32 96, i32 102, i32 108, i32 114, i32 120, i32 126, i32 132, i32 138, i32 144, i32 150, i32 156, i32 162, i32 168, i32 174, i32 180, i32 186>
9586 %strided.vec1 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91, i32 97, i32 103, i32 109, i32 115, i32 121, i32 127, i32 133, i32 139, i32 145, i32 151, i32 157, i32 163, i32 169, i32 175, i32 181, i32 187>
9587 %strided.vec2 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92, i32 98, i32 104, i32 110, i32 116, i32 122, i32 128, i32 134, i32 140, i32 146, i32 152, i32 158, i32 164, i32 170, i32 176, i32 182, i32 188>
9588 %strided.vec3 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93, i32 99, i32 105, i32 111, i32 117, i32 123, i32 129, i32 135, i32 141, i32 147, i32 153, i32 159, i32 165, i32 171, i32 177, i32 183, i32 189>
9589 %strided.vec4 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94, i32 100, i32 106, i32 112, i32 118, i32 124, i32 130, i32 136, i32 142, i32 148, i32 154, i32 160, i32 166, i32 172, i32 178, i32 184, i32 190>
9590 %strided.vec5 = shufflevector <192 x i64> %wide.vec, <192 x i64> poison, <32 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95, i32 101, i32 107, i32 113, i32 119, i32 125, i32 131, i32 137, i32 143, i32 149, i32 155, i32 161, i32 167, i32 173, i32 179, i32 185, i32 191>
9591 store <32 x i64> %strided.vec0, ptr %out.vec0, align 64
9592 store <32 x i64> %strided.vec1, ptr %out.vec1, align 64
9593 store <32 x i64> %strided.vec2, ptr %out.vec2, align 64
9594 store <32 x i64> %strided.vec3, ptr %out.vec3, align 64
9595 store <32 x i64> %strided.vec4, ptr %out.vec4, align 64
9596 store <32 x i64> %strided.vec5, ptr %out.vec5, align 64
9600 define void @load_i64_stride6_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5) nounwind {
9601 ; SSE-LABEL: load_i64_stride6_vf64:
9603 ; SSE-NEXT: subq $2712, %rsp # imm = 0xA98
9604 ; SSE-NEXT: movaps 816(%rdi), %xmm0
9605 ; SSE-NEXT: movaps 720(%rdi), %xmm1
9606 ; SSE-NEXT: movaps 672(%rdi), %xmm8
9607 ; SSE-NEXT: movaps 624(%rdi), %xmm2
9608 ; SSE-NEXT: movaps 576(%rdi), %xmm9
9609 ; SSE-NEXT: movaps 528(%rdi), %xmm3
9610 ; SSE-NEXT: movaps 480(%rdi), %xmm10
9611 ; SSE-NEXT: movaps 432(%rdi), %xmm4
9612 ; SSE-NEXT: movaps 384(%rdi), %xmm11
9613 ; SSE-NEXT: movaps 336(%rdi), %xmm5
9614 ; SSE-NEXT: movaps 288(%rdi), %xmm12
9615 ; SSE-NEXT: movaps 240(%rdi), %xmm6
9616 ; SSE-NEXT: movaps 192(%rdi), %xmm13
9617 ; SSE-NEXT: movaps 144(%rdi), %xmm7
9618 ; SSE-NEXT: movaps 96(%rdi), %xmm14
9619 ; SSE-NEXT: movaps %xmm14, %xmm15
9620 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm7[0]
9621 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9622 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm7[1]
9623 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9624 ; SSE-NEXT: movaps %xmm13, %xmm7
9625 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm6[0]
9626 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9627 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
9628 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9629 ; SSE-NEXT: movaps %xmm12, %xmm6
9630 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
9631 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9632 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm5[1]
9633 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9634 ; SSE-NEXT: movaps %xmm11, %xmm5
9635 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
9636 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9637 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm4[1]
9638 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9639 ; SSE-NEXT: movaps %xmm10, %xmm4
9640 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
9641 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9642 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm3[1]
9643 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9644 ; SSE-NEXT: movaps %xmm9, %xmm3
9645 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
9646 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9647 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm2[1]
9648 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9649 ; SSE-NEXT: movaps %xmm8, %xmm2
9650 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
9651 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9652 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
9653 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9654 ; SSE-NEXT: movaps 768(%rdi), %xmm1
9655 ; SSE-NEXT: movaps %xmm1, %xmm2
9656 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9657 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9658 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9659 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9660 ; SSE-NEXT: movaps 912(%rdi), %xmm0
9661 ; SSE-NEXT: movaps 864(%rdi), %xmm1
9662 ; SSE-NEXT: movaps %xmm1, %xmm2
9663 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9664 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9665 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9666 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9667 ; SSE-NEXT: movaps 1008(%rdi), %xmm0
9668 ; SSE-NEXT: movaps 960(%rdi), %xmm1
9669 ; SSE-NEXT: movaps %xmm1, %xmm2
9670 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9671 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9672 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9673 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9674 ; SSE-NEXT: movaps 1104(%rdi), %xmm0
9675 ; SSE-NEXT: movaps 1056(%rdi), %xmm1
9676 ; SSE-NEXT: movaps %xmm1, %xmm2
9677 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9678 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9679 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9680 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9681 ; SSE-NEXT: movaps 1200(%rdi), %xmm0
9682 ; SSE-NEXT: movaps 1152(%rdi), %xmm1
9683 ; SSE-NEXT: movaps %xmm1, %xmm2
9684 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9685 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9686 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9687 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9688 ; SSE-NEXT: movaps 1296(%rdi), %xmm0
9689 ; SSE-NEXT: movaps 1248(%rdi), %xmm1
9690 ; SSE-NEXT: movaps %xmm1, %xmm2
9691 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9692 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9693 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9694 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9695 ; SSE-NEXT: movaps 1392(%rdi), %xmm0
9696 ; SSE-NEXT: movaps 1344(%rdi), %xmm1
9697 ; SSE-NEXT: movaps %xmm1, %xmm2
9698 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9699 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9700 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9701 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9702 ; SSE-NEXT: movaps 1488(%rdi), %xmm0
9703 ; SSE-NEXT: movaps 1440(%rdi), %xmm1
9704 ; SSE-NEXT: movaps %xmm1, %xmm2
9705 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9706 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9707 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9708 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9709 ; SSE-NEXT: movaps 1584(%rdi), %xmm0
9710 ; SSE-NEXT: movaps 1536(%rdi), %xmm1
9711 ; SSE-NEXT: movaps %xmm1, %xmm2
9712 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9713 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9714 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9715 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9716 ; SSE-NEXT: movaps 1680(%rdi), %xmm0
9717 ; SSE-NEXT: movaps 1632(%rdi), %xmm1
9718 ; SSE-NEXT: movaps %xmm1, %xmm2
9719 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9720 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9721 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9722 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9723 ; SSE-NEXT: movaps 1776(%rdi), %xmm0
9724 ; SSE-NEXT: movaps 1728(%rdi), %xmm1
9725 ; SSE-NEXT: movaps %xmm1, %xmm2
9726 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9727 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9728 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9729 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9730 ; SSE-NEXT: movaps 1872(%rdi), %xmm0
9731 ; SSE-NEXT: movaps 1824(%rdi), %xmm1
9732 ; SSE-NEXT: movaps %xmm1, %xmm2
9733 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9734 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9735 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9736 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9737 ; SSE-NEXT: movaps 1968(%rdi), %xmm0
9738 ; SSE-NEXT: movaps 1920(%rdi), %xmm1
9739 ; SSE-NEXT: movaps %xmm1, %xmm2
9740 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9741 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9742 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9743 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9744 ; SSE-NEXT: movaps 2064(%rdi), %xmm0
9745 ; SSE-NEXT: movaps 2016(%rdi), %xmm1
9746 ; SSE-NEXT: movaps %xmm1, %xmm2
9747 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9748 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9749 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9750 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9751 ; SSE-NEXT: movaps 2160(%rdi), %xmm0
9752 ; SSE-NEXT: movaps 2112(%rdi), %xmm1
9753 ; SSE-NEXT: movaps %xmm1, %xmm2
9754 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9755 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9756 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9757 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9758 ; SSE-NEXT: movaps 2256(%rdi), %xmm0
9759 ; SSE-NEXT: movaps 2208(%rdi), %xmm1
9760 ; SSE-NEXT: movaps %xmm1, %xmm2
9761 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9762 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9763 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9764 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9765 ; SSE-NEXT: movaps 2352(%rdi), %xmm0
9766 ; SSE-NEXT: movaps 2304(%rdi), %xmm1
9767 ; SSE-NEXT: movaps %xmm1, %xmm2
9768 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9769 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9770 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9771 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9772 ; SSE-NEXT: movaps 2448(%rdi), %xmm0
9773 ; SSE-NEXT: movaps 2400(%rdi), %xmm1
9774 ; SSE-NEXT: movaps %xmm1, %xmm2
9775 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9776 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9777 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9778 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9779 ; SSE-NEXT: movaps 2544(%rdi), %xmm0
9780 ; SSE-NEXT: movaps 2496(%rdi), %xmm1
9781 ; SSE-NEXT: movaps %xmm1, %xmm2
9782 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9783 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9784 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9785 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9786 ; SSE-NEXT: movaps 2640(%rdi), %xmm0
9787 ; SSE-NEXT: movaps 2592(%rdi), %xmm1
9788 ; SSE-NEXT: movaps %xmm1, %xmm2
9789 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9790 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9791 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9792 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9793 ; SSE-NEXT: movaps 2736(%rdi), %xmm0
9794 ; SSE-NEXT: movaps 2688(%rdi), %xmm1
9795 ; SSE-NEXT: movaps %xmm1, %xmm2
9796 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9797 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9798 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9799 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9800 ; SSE-NEXT: movaps 2832(%rdi), %xmm0
9801 ; SSE-NEXT: movaps 2784(%rdi), %xmm1
9802 ; SSE-NEXT: movaps %xmm1, %xmm2
9803 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9804 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9805 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9806 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9807 ; SSE-NEXT: movaps 2928(%rdi), %xmm0
9808 ; SSE-NEXT: movaps 2880(%rdi), %xmm1
9809 ; SSE-NEXT: movaps %xmm1, %xmm2
9810 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9811 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9812 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9813 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9814 ; SSE-NEXT: movaps 3024(%rdi), %xmm0
9815 ; SSE-NEXT: movaps 2976(%rdi), %xmm1
9816 ; SSE-NEXT: movaps %xmm1, %xmm2
9817 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9818 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9819 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9820 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9821 ; SSE-NEXT: movaps (%rdi), %xmm1
9822 ; SSE-NEXT: movaps 48(%rdi), %xmm0
9823 ; SSE-NEXT: movaps %xmm1, %xmm2
9824 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9825 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9826 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9827 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9828 ; SSE-NEXT: movaps 64(%rdi), %xmm0
9829 ; SSE-NEXT: movaps 16(%rdi), %xmm1
9830 ; SSE-NEXT: movaps %xmm1, %xmm2
9831 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9832 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9833 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9834 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9835 ; SSE-NEXT: movaps 160(%rdi), %xmm0
9836 ; SSE-NEXT: movaps 112(%rdi), %xmm1
9837 ; SSE-NEXT: movaps %xmm1, %xmm2
9838 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9839 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9840 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9841 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9842 ; SSE-NEXT: movaps 256(%rdi), %xmm0
9843 ; SSE-NEXT: movaps 208(%rdi), %xmm1
9844 ; SSE-NEXT: movaps %xmm1, %xmm2
9845 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9846 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9847 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9848 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9849 ; SSE-NEXT: movaps 352(%rdi), %xmm0
9850 ; SSE-NEXT: movaps 304(%rdi), %xmm1
9851 ; SSE-NEXT: movaps %xmm1, %xmm2
9852 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9853 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9854 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9855 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9856 ; SSE-NEXT: movaps 448(%rdi), %xmm0
9857 ; SSE-NEXT: movaps 400(%rdi), %xmm1
9858 ; SSE-NEXT: movaps %xmm1, %xmm2
9859 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9860 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9861 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9862 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9863 ; SSE-NEXT: movaps 544(%rdi), %xmm0
9864 ; SSE-NEXT: movaps 496(%rdi), %xmm1
9865 ; SSE-NEXT: movaps %xmm1, %xmm2
9866 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9867 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9868 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9869 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9870 ; SSE-NEXT: movaps 640(%rdi), %xmm0
9871 ; SSE-NEXT: movaps 592(%rdi), %xmm1
9872 ; SSE-NEXT: movaps %xmm1, %xmm2
9873 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9874 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9875 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9876 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9877 ; SSE-NEXT: movaps 736(%rdi), %xmm0
9878 ; SSE-NEXT: movaps 688(%rdi), %xmm1
9879 ; SSE-NEXT: movaps %xmm1, %xmm2
9880 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9881 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9882 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9883 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9884 ; SSE-NEXT: movaps 832(%rdi), %xmm0
9885 ; SSE-NEXT: movaps 784(%rdi), %xmm1
9886 ; SSE-NEXT: movaps %xmm1, %xmm2
9887 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9888 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9889 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9890 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9891 ; SSE-NEXT: movaps 928(%rdi), %xmm0
9892 ; SSE-NEXT: movaps 880(%rdi), %xmm1
9893 ; SSE-NEXT: movaps %xmm1, %xmm2
9894 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9895 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9896 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9897 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9898 ; SSE-NEXT: movaps 1024(%rdi), %xmm0
9899 ; SSE-NEXT: movaps 976(%rdi), %xmm1
9900 ; SSE-NEXT: movaps %xmm1, %xmm2
9901 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9902 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9903 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9904 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9905 ; SSE-NEXT: movaps 1120(%rdi), %xmm0
9906 ; SSE-NEXT: movaps 1072(%rdi), %xmm1
9907 ; SSE-NEXT: movaps %xmm1, %xmm2
9908 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9909 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9910 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9911 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9912 ; SSE-NEXT: movaps 1216(%rdi), %xmm0
9913 ; SSE-NEXT: movaps 1168(%rdi), %xmm1
9914 ; SSE-NEXT: movaps %xmm1, %xmm2
9915 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9916 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9917 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9918 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9919 ; SSE-NEXT: movaps 1312(%rdi), %xmm0
9920 ; SSE-NEXT: movaps 1264(%rdi), %xmm1
9921 ; SSE-NEXT: movaps %xmm1, %xmm2
9922 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9923 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9924 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9925 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9926 ; SSE-NEXT: movaps 1408(%rdi), %xmm0
9927 ; SSE-NEXT: movaps 1360(%rdi), %xmm1
9928 ; SSE-NEXT: movaps %xmm1, %xmm2
9929 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9930 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9931 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9932 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9933 ; SSE-NEXT: movaps 1504(%rdi), %xmm0
9934 ; SSE-NEXT: movaps 1456(%rdi), %xmm1
9935 ; SSE-NEXT: movaps %xmm1, %xmm2
9936 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9937 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9938 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9939 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9940 ; SSE-NEXT: movaps 1600(%rdi), %xmm0
9941 ; SSE-NEXT: movaps 1552(%rdi), %xmm1
9942 ; SSE-NEXT: movaps %xmm1, %xmm2
9943 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9944 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9945 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9946 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9947 ; SSE-NEXT: movaps 1696(%rdi), %xmm0
9948 ; SSE-NEXT: movaps 1648(%rdi), %xmm1
9949 ; SSE-NEXT: movaps %xmm1, %xmm2
9950 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9951 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9952 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9953 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9954 ; SSE-NEXT: movaps 1792(%rdi), %xmm0
9955 ; SSE-NEXT: movaps 1744(%rdi), %xmm1
9956 ; SSE-NEXT: movaps %xmm1, %xmm2
9957 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9958 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9959 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9960 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9961 ; SSE-NEXT: movaps 1888(%rdi), %xmm0
9962 ; SSE-NEXT: movaps 1840(%rdi), %xmm1
9963 ; SSE-NEXT: movaps %xmm1, %xmm2
9964 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9965 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9966 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9967 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9968 ; SSE-NEXT: movaps 1984(%rdi), %xmm0
9969 ; SSE-NEXT: movaps 1936(%rdi), %xmm1
9970 ; SSE-NEXT: movaps %xmm1, %xmm2
9971 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9972 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9973 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9974 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9975 ; SSE-NEXT: movaps 2080(%rdi), %xmm0
9976 ; SSE-NEXT: movaps 2032(%rdi), %xmm1
9977 ; SSE-NEXT: movaps %xmm1, %xmm2
9978 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9979 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9980 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9981 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9982 ; SSE-NEXT: movaps 2176(%rdi), %xmm0
9983 ; SSE-NEXT: movaps 2128(%rdi), %xmm1
9984 ; SSE-NEXT: movaps %xmm1, %xmm2
9985 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9986 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9987 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9988 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9989 ; SSE-NEXT: movaps 2272(%rdi), %xmm0
9990 ; SSE-NEXT: movaps 2224(%rdi), %xmm1
9991 ; SSE-NEXT: movaps %xmm1, %xmm2
9992 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
9993 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9994 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9995 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9996 ; SSE-NEXT: movaps 2368(%rdi), %xmm0
9997 ; SSE-NEXT: movaps 2320(%rdi), %xmm1
9998 ; SSE-NEXT: movaps %xmm1, %xmm2
9999 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10000 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10001 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10002 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10003 ; SSE-NEXT: movaps 2464(%rdi), %xmm0
10004 ; SSE-NEXT: movaps 2416(%rdi), %xmm1
10005 ; SSE-NEXT: movaps %xmm1, %xmm2
10006 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10007 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10008 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10009 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10010 ; SSE-NEXT: movaps 2560(%rdi), %xmm0
10011 ; SSE-NEXT: movaps 2512(%rdi), %xmm1
10012 ; SSE-NEXT: movaps %xmm1, %xmm2
10013 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10014 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10015 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10016 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10017 ; SSE-NEXT: movaps 2656(%rdi), %xmm0
10018 ; SSE-NEXT: movaps 2608(%rdi), %xmm1
10019 ; SSE-NEXT: movaps %xmm1, %xmm2
10020 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10021 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10022 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10023 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10024 ; SSE-NEXT: movaps 2752(%rdi), %xmm0
10025 ; SSE-NEXT: movaps 2704(%rdi), %xmm1
10026 ; SSE-NEXT: movaps %xmm1, %xmm2
10027 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10028 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10029 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10030 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10031 ; SSE-NEXT: movaps 2848(%rdi), %xmm0
10032 ; SSE-NEXT: movaps 2800(%rdi), %xmm1
10033 ; SSE-NEXT: movaps %xmm1, %xmm2
10034 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10035 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10036 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10037 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10038 ; SSE-NEXT: movaps 2944(%rdi), %xmm0
10039 ; SSE-NEXT: movaps 2896(%rdi), %xmm1
10040 ; SSE-NEXT: movaps %xmm1, %xmm2
10041 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10042 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10043 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10044 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10045 ; SSE-NEXT: movaps 3040(%rdi), %xmm0
10046 ; SSE-NEXT: movaps 2992(%rdi), %xmm1
10047 ; SSE-NEXT: movaps %xmm1, %xmm2
10048 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10049 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10050 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10051 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10052 ; SSE-NEXT: movaps 80(%rdi), %xmm0
10053 ; SSE-NEXT: movaps 32(%rdi), %xmm1
10054 ; SSE-NEXT: movaps %xmm1, %xmm2
10055 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10056 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10057 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10058 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10059 ; SSE-NEXT: movaps 176(%rdi), %xmm0
10060 ; SSE-NEXT: movaps 128(%rdi), %xmm1
10061 ; SSE-NEXT: movaps %xmm1, %xmm2
10062 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10063 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10064 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10065 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10066 ; SSE-NEXT: movaps 272(%rdi), %xmm0
10067 ; SSE-NEXT: movaps 224(%rdi), %xmm1
10068 ; SSE-NEXT: movaps %xmm1, %xmm2
10069 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10070 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10071 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10072 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10073 ; SSE-NEXT: movaps 368(%rdi), %xmm0
10074 ; SSE-NEXT: movaps 320(%rdi), %xmm1
10075 ; SSE-NEXT: movaps %xmm1, %xmm2
10076 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10077 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10078 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10079 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10080 ; SSE-NEXT: movaps 464(%rdi), %xmm0
10081 ; SSE-NEXT: movaps 416(%rdi), %xmm1
10082 ; SSE-NEXT: movaps %xmm1, %xmm2
10083 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10084 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10085 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10086 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10087 ; SSE-NEXT: movaps 560(%rdi), %xmm0
10088 ; SSE-NEXT: movaps 512(%rdi), %xmm1
10089 ; SSE-NEXT: movaps %xmm1, %xmm2
10090 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10091 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10092 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10093 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10094 ; SSE-NEXT: movaps 656(%rdi), %xmm0
10095 ; SSE-NEXT: movaps 608(%rdi), %xmm1
10096 ; SSE-NEXT: movaps %xmm1, %xmm2
10097 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10098 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10099 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10100 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10101 ; SSE-NEXT: movaps 752(%rdi), %xmm0
10102 ; SSE-NEXT: movaps 704(%rdi), %xmm1
10103 ; SSE-NEXT: movaps %xmm1, %xmm2
10104 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10105 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10106 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10107 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10108 ; SSE-NEXT: movaps 848(%rdi), %xmm0
10109 ; SSE-NEXT: movaps 800(%rdi), %xmm1
10110 ; SSE-NEXT: movaps %xmm1, %xmm2
10111 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10112 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10113 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10114 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10115 ; SSE-NEXT: movaps 944(%rdi), %xmm0
10116 ; SSE-NEXT: movaps 896(%rdi), %xmm1
10117 ; SSE-NEXT: movaps %xmm1, %xmm2
10118 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10119 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10120 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10121 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10122 ; SSE-NEXT: movaps 1040(%rdi), %xmm0
10123 ; SSE-NEXT: movaps 992(%rdi), %xmm1
10124 ; SSE-NEXT: movaps %xmm1, %xmm2
10125 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10126 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10127 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10128 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10129 ; SSE-NEXT: movaps 1136(%rdi), %xmm0
10130 ; SSE-NEXT: movaps 1088(%rdi), %xmm1
10131 ; SSE-NEXT: movaps %xmm1, %xmm2
10132 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10133 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10134 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10135 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10136 ; SSE-NEXT: movaps 1232(%rdi), %xmm0
10137 ; SSE-NEXT: movaps 1184(%rdi), %xmm1
10138 ; SSE-NEXT: movaps %xmm1, %xmm2
10139 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10140 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10141 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10142 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10143 ; SSE-NEXT: movaps 1328(%rdi), %xmm0
10144 ; SSE-NEXT: movaps 1280(%rdi), %xmm1
10145 ; SSE-NEXT: movaps %xmm1, %xmm2
10146 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10147 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10148 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10149 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10150 ; SSE-NEXT: movaps 1424(%rdi), %xmm0
10151 ; SSE-NEXT: movaps 1376(%rdi), %xmm1
10152 ; SSE-NEXT: movaps %xmm1, %xmm2
10153 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10154 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10155 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10156 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10157 ; SSE-NEXT: movaps 1520(%rdi), %xmm0
10158 ; SSE-NEXT: movaps 1472(%rdi), %xmm1
10159 ; SSE-NEXT: movaps %xmm1, %xmm2
10160 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10161 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10162 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10163 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10164 ; SSE-NEXT: movaps 1616(%rdi), %xmm0
10165 ; SSE-NEXT: movaps 1568(%rdi), %xmm1
10166 ; SSE-NEXT: movaps %xmm1, %xmm2
10167 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10168 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10169 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10170 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10171 ; SSE-NEXT: movaps 1712(%rdi), %xmm0
10172 ; SSE-NEXT: movaps 1664(%rdi), %xmm1
10173 ; SSE-NEXT: movaps %xmm1, %xmm2
10174 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10175 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10176 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10177 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10178 ; SSE-NEXT: movaps 1808(%rdi), %xmm0
10179 ; SSE-NEXT: movaps 1760(%rdi), %xmm1
10180 ; SSE-NEXT: movaps %xmm1, %xmm2
10181 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10182 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10183 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10184 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10185 ; SSE-NEXT: movaps 1904(%rdi), %xmm0
10186 ; SSE-NEXT: movaps 1856(%rdi), %xmm1
10187 ; SSE-NEXT: movaps %xmm1, %xmm2
10188 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10189 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10190 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10191 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10192 ; SSE-NEXT: movaps 2000(%rdi), %xmm0
10193 ; SSE-NEXT: movaps 1952(%rdi), %xmm1
10194 ; SSE-NEXT: movaps %xmm1, %xmm2
10195 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10196 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10197 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10198 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10199 ; SSE-NEXT: movaps 2096(%rdi), %xmm0
10200 ; SSE-NEXT: movaps 2048(%rdi), %xmm1
10201 ; SSE-NEXT: movaps %xmm1, %xmm2
10202 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10203 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10204 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10205 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10206 ; SSE-NEXT: movaps 2192(%rdi), %xmm0
10207 ; SSE-NEXT: movaps 2144(%rdi), %xmm1
10208 ; SSE-NEXT: movaps %xmm1, %xmm2
10209 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10210 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10211 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10212 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
10213 ; SSE-NEXT: movaps 2288(%rdi), %xmm0
10214 ; SSE-NEXT: movaps 2240(%rdi), %xmm15
10215 ; SSE-NEXT: movaps %xmm15, %xmm1
10216 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10217 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10218 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
10219 ; SSE-NEXT: movaps 2384(%rdi), %xmm0
10220 ; SSE-NEXT: movaps 2336(%rdi), %xmm12
10221 ; SSE-NEXT: movaps %xmm12, %xmm1
10222 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10223 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10224 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
10225 ; SSE-NEXT: movaps 2480(%rdi), %xmm0
10226 ; SSE-NEXT: movaps 2432(%rdi), %xmm14
10227 ; SSE-NEXT: movaps %xmm14, %xmm1
10228 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10229 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10230 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
10231 ; SSE-NEXT: movaps 2576(%rdi), %xmm0
10232 ; SSE-NEXT: movaps 2528(%rdi), %xmm10
10233 ; SSE-NEXT: movaps %xmm10, %xmm13
10234 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
10235 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
10236 ; SSE-NEXT: movaps 2672(%rdi), %xmm0
10237 ; SSE-NEXT: movaps 2624(%rdi), %xmm9
10238 ; SSE-NEXT: movaps %xmm9, %xmm11
10239 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
10240 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
10241 ; SSE-NEXT: movaps 2768(%rdi), %xmm0
10242 ; SSE-NEXT: movaps 2720(%rdi), %xmm5
10243 ; SSE-NEXT: movaps %xmm5, %xmm8
10244 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
10245 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
10246 ; SSE-NEXT: movaps 2864(%rdi), %xmm0
10247 ; SSE-NEXT: movaps 2816(%rdi), %xmm6
10248 ; SSE-NEXT: movaps %xmm6, %xmm7
10249 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm0[0]
10250 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm0[1]
10251 ; SSE-NEXT: movaps 2960(%rdi), %xmm0
10252 ; SSE-NEXT: movaps 2912(%rdi), %xmm3
10253 ; SSE-NEXT: movaps %xmm3, %xmm4
10254 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
10255 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm0[1]
10256 ; SSE-NEXT: movaps 3056(%rdi), %xmm0
10257 ; SSE-NEXT: movaps 3008(%rdi), %xmm1
10258 ; SSE-NEXT: movaps %xmm1, %xmm2
10259 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
10260 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
10261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10262 ; SSE-NEXT: movaps %xmm0, 496(%rsi)
10263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10264 ; SSE-NEXT: movaps %xmm0, 480(%rsi)
10265 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10266 ; SSE-NEXT: movaps %xmm0, 464(%rsi)
10267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10268 ; SSE-NEXT: movaps %xmm0, 448(%rsi)
10269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10270 ; SSE-NEXT: movaps %xmm0, 432(%rsi)
10271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10272 ; SSE-NEXT: movaps %xmm0, 416(%rsi)
10273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10274 ; SSE-NEXT: movaps %xmm0, 400(%rsi)
10275 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10276 ; SSE-NEXT: movaps %xmm0, 384(%rsi)
10277 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10278 ; SSE-NEXT: movaps %xmm0, 368(%rsi)
10279 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10280 ; SSE-NEXT: movaps %xmm0, 352(%rsi)
10281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10282 ; SSE-NEXT: movaps %xmm0, 336(%rsi)
10283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10284 ; SSE-NEXT: movaps %xmm0, 320(%rsi)
10285 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10286 ; SSE-NEXT: movaps %xmm0, 304(%rsi)
10287 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10288 ; SSE-NEXT: movaps %xmm0, 288(%rsi)
10289 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10290 ; SSE-NEXT: movaps %xmm0, 272(%rsi)
10291 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10292 ; SSE-NEXT: movaps %xmm0, 256(%rsi)
10293 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10294 ; SSE-NEXT: movaps %xmm0, 240(%rsi)
10295 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10296 ; SSE-NEXT: movaps %xmm0, 224(%rsi)
10297 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10298 ; SSE-NEXT: movaps %xmm0, 208(%rsi)
10299 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10300 ; SSE-NEXT: movaps %xmm0, 192(%rsi)
10301 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10302 ; SSE-NEXT: movaps %xmm0, 176(%rsi)
10303 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10304 ; SSE-NEXT: movaps %xmm0, 160(%rsi)
10305 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10306 ; SSE-NEXT: movaps %xmm0, 144(%rsi)
10307 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10308 ; SSE-NEXT: movaps %xmm0, 128(%rsi)
10309 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10310 ; SSE-NEXT: movaps %xmm0, 112(%rsi)
10311 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10312 ; SSE-NEXT: movaps %xmm0, 96(%rsi)
10313 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10314 ; SSE-NEXT: movaps %xmm0, 80(%rsi)
10315 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10316 ; SSE-NEXT: movaps %xmm0, 64(%rsi)
10317 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10318 ; SSE-NEXT: movaps %xmm0, 48(%rsi)
10319 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10320 ; SSE-NEXT: movaps %xmm0, 32(%rsi)
10321 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10322 ; SSE-NEXT: movaps %xmm0, 16(%rsi)
10323 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10324 ; SSE-NEXT: movaps %xmm0, (%rsi)
10325 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10326 ; SSE-NEXT: movaps %xmm0, 496(%rdx)
10327 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10328 ; SSE-NEXT: movaps %xmm0, 480(%rdx)
10329 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10330 ; SSE-NEXT: movaps %xmm0, 464(%rdx)
10331 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10332 ; SSE-NEXT: movaps %xmm0, 448(%rdx)
10333 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10334 ; SSE-NEXT: movaps %xmm0, 432(%rdx)
10335 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10336 ; SSE-NEXT: movaps %xmm0, 416(%rdx)
10337 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10338 ; SSE-NEXT: movaps %xmm0, 400(%rdx)
10339 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10340 ; SSE-NEXT: movaps %xmm0, 384(%rdx)
10341 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10342 ; SSE-NEXT: movaps %xmm0, 368(%rdx)
10343 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10344 ; SSE-NEXT: movaps %xmm0, 352(%rdx)
10345 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10346 ; SSE-NEXT: movaps %xmm0, 336(%rdx)
10347 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10348 ; SSE-NEXT: movaps %xmm0, 320(%rdx)
10349 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10350 ; SSE-NEXT: movaps %xmm0, 304(%rdx)
10351 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10352 ; SSE-NEXT: movaps %xmm0, 288(%rdx)
10353 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10354 ; SSE-NEXT: movaps %xmm0, 272(%rdx)
10355 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10356 ; SSE-NEXT: movaps %xmm0, 256(%rdx)
10357 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10358 ; SSE-NEXT: movaps %xmm0, 240(%rdx)
10359 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10360 ; SSE-NEXT: movaps %xmm0, 224(%rdx)
10361 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10362 ; SSE-NEXT: movaps %xmm0, 208(%rdx)
10363 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10364 ; SSE-NEXT: movaps %xmm0, 192(%rdx)
10365 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10366 ; SSE-NEXT: movaps %xmm0, 176(%rdx)
10367 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10368 ; SSE-NEXT: movaps %xmm0, 160(%rdx)
10369 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10370 ; SSE-NEXT: movaps %xmm0, 144(%rdx)
10371 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10372 ; SSE-NEXT: movaps %xmm0, 128(%rdx)
10373 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10374 ; SSE-NEXT: movaps %xmm0, 112(%rdx)
10375 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10376 ; SSE-NEXT: movaps %xmm0, 96(%rdx)
10377 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10378 ; SSE-NEXT: movaps %xmm0, 80(%rdx)
10379 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10380 ; SSE-NEXT: movaps %xmm0, 64(%rdx)
10381 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10382 ; SSE-NEXT: movaps %xmm0, 48(%rdx)
10383 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10384 ; SSE-NEXT: movaps %xmm0, 32(%rdx)
10385 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10386 ; SSE-NEXT: movaps %xmm0, 16(%rdx)
10387 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10388 ; SSE-NEXT: movaps %xmm0, (%rdx)
10389 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10390 ; SSE-NEXT: movaps %xmm0, 496(%rcx)
10391 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10392 ; SSE-NEXT: movaps %xmm0, 480(%rcx)
10393 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10394 ; SSE-NEXT: movaps %xmm0, 464(%rcx)
10395 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10396 ; SSE-NEXT: movaps %xmm0, 448(%rcx)
10397 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10398 ; SSE-NEXT: movaps %xmm0, 432(%rcx)
10399 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10400 ; SSE-NEXT: movaps %xmm0, 416(%rcx)
10401 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10402 ; SSE-NEXT: movaps %xmm0, 400(%rcx)
10403 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10404 ; SSE-NEXT: movaps %xmm0, 384(%rcx)
10405 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10406 ; SSE-NEXT: movaps %xmm0, 368(%rcx)
10407 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10408 ; SSE-NEXT: movaps %xmm0, 352(%rcx)
10409 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10410 ; SSE-NEXT: movaps %xmm0, 336(%rcx)
10411 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10412 ; SSE-NEXT: movaps %xmm0, 320(%rcx)
10413 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10414 ; SSE-NEXT: movaps %xmm0, 304(%rcx)
10415 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10416 ; SSE-NEXT: movaps %xmm0, 288(%rcx)
10417 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10418 ; SSE-NEXT: movaps %xmm0, 272(%rcx)
10419 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10420 ; SSE-NEXT: movaps %xmm0, 256(%rcx)
10421 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10422 ; SSE-NEXT: movaps %xmm0, 240(%rcx)
10423 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10424 ; SSE-NEXT: movaps %xmm0, 224(%rcx)
10425 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10426 ; SSE-NEXT: movaps %xmm0, 208(%rcx)
10427 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10428 ; SSE-NEXT: movaps %xmm0, 192(%rcx)
10429 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10430 ; SSE-NEXT: movaps %xmm0, 176(%rcx)
10431 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10432 ; SSE-NEXT: movaps %xmm0, 160(%rcx)
10433 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10434 ; SSE-NEXT: movaps %xmm0, 144(%rcx)
10435 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10436 ; SSE-NEXT: movaps %xmm0, 128(%rcx)
10437 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10438 ; SSE-NEXT: movaps %xmm0, 112(%rcx)
10439 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10440 ; SSE-NEXT: movaps %xmm0, 96(%rcx)
10441 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10442 ; SSE-NEXT: movaps %xmm0, 80(%rcx)
10443 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10444 ; SSE-NEXT: movaps %xmm0, 64(%rcx)
10445 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10446 ; SSE-NEXT: movaps %xmm0, 48(%rcx)
10447 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10448 ; SSE-NEXT: movaps %xmm0, 32(%rcx)
10449 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10450 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
10451 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10452 ; SSE-NEXT: movaps %xmm0, (%rcx)
10453 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10454 ; SSE-NEXT: movaps %xmm0, 496(%r8)
10455 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10456 ; SSE-NEXT: movaps %xmm0, 480(%r8)
10457 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10458 ; SSE-NEXT: movaps %xmm0, 464(%r8)
10459 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10460 ; SSE-NEXT: movaps %xmm0, 448(%r8)
10461 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10462 ; SSE-NEXT: movaps %xmm0, 432(%r8)
10463 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10464 ; SSE-NEXT: movaps %xmm0, 416(%r8)
10465 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10466 ; SSE-NEXT: movaps %xmm0, 400(%r8)
10467 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10468 ; SSE-NEXT: movaps %xmm0, 384(%r8)
10469 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10470 ; SSE-NEXT: movaps %xmm0, 368(%r8)
10471 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10472 ; SSE-NEXT: movaps %xmm0, 352(%r8)
10473 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10474 ; SSE-NEXT: movaps %xmm0, 336(%r8)
10475 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10476 ; SSE-NEXT: movaps %xmm0, 320(%r8)
10477 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10478 ; SSE-NEXT: movaps %xmm0, 304(%r8)
10479 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10480 ; SSE-NEXT: movaps %xmm0, 288(%r8)
10481 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10482 ; SSE-NEXT: movaps %xmm0, 272(%r8)
10483 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10484 ; SSE-NEXT: movaps %xmm0, 256(%r8)
10485 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10486 ; SSE-NEXT: movaps %xmm0, 240(%r8)
10487 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10488 ; SSE-NEXT: movaps %xmm0, 224(%r8)
10489 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10490 ; SSE-NEXT: movaps %xmm0, 208(%r8)
10491 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10492 ; SSE-NEXT: movaps %xmm0, 192(%r8)
10493 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10494 ; SSE-NEXT: movaps %xmm0, 176(%r8)
10495 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10496 ; SSE-NEXT: movaps %xmm0, 160(%r8)
10497 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10498 ; SSE-NEXT: movaps %xmm0, 144(%r8)
10499 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10500 ; SSE-NEXT: movaps %xmm0, 128(%r8)
10501 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10502 ; SSE-NEXT: movaps %xmm0, 112(%r8)
10503 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10504 ; SSE-NEXT: movaps %xmm0, 96(%r8)
10505 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10506 ; SSE-NEXT: movaps %xmm0, 80(%r8)
10507 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10508 ; SSE-NEXT: movaps %xmm0, 64(%r8)
10509 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10510 ; SSE-NEXT: movaps %xmm0, 48(%r8)
10511 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10512 ; SSE-NEXT: movaps %xmm0, 32(%r8)
10513 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10514 ; SSE-NEXT: movaps %xmm0, 16(%r8)
10515 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10516 ; SSE-NEXT: movaps %xmm0, (%r8)
10517 ; SSE-NEXT: movaps %xmm2, 496(%r9)
10518 ; SSE-NEXT: movaps %xmm4, 480(%r9)
10519 ; SSE-NEXT: movaps %xmm7, 464(%r9)
10520 ; SSE-NEXT: movaps %xmm8, 448(%r9)
10521 ; SSE-NEXT: movaps %xmm11, 432(%r9)
10522 ; SSE-NEXT: movaps %xmm13, 416(%r9)
10523 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10524 ; SSE-NEXT: movaps %xmm0, 400(%r9)
10525 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10526 ; SSE-NEXT: movaps %xmm0, 384(%r9)
10527 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10528 ; SSE-NEXT: movaps %xmm0, 368(%r9)
10529 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10530 ; SSE-NEXT: movaps %xmm0, 352(%r9)
10531 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10532 ; SSE-NEXT: movaps %xmm0, 336(%r9)
10533 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10534 ; SSE-NEXT: movaps %xmm0, 320(%r9)
10535 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10536 ; SSE-NEXT: movaps %xmm0, 304(%r9)
10537 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10538 ; SSE-NEXT: movaps %xmm0, 288(%r9)
10539 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10540 ; SSE-NEXT: movaps %xmm0, 272(%r9)
10541 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10542 ; SSE-NEXT: movaps %xmm0, 256(%r9)
10543 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10544 ; SSE-NEXT: movaps %xmm0, 240(%r9)
10545 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10546 ; SSE-NEXT: movaps %xmm0, 224(%r9)
10547 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10548 ; SSE-NEXT: movaps %xmm0, 208(%r9)
10549 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10550 ; SSE-NEXT: movaps %xmm0, 192(%r9)
10551 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10552 ; SSE-NEXT: movaps %xmm0, 176(%r9)
10553 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10554 ; SSE-NEXT: movaps %xmm0, 160(%r9)
10555 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10556 ; SSE-NEXT: movaps %xmm0, 144(%r9)
10557 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10558 ; SSE-NEXT: movaps %xmm0, 128(%r9)
10559 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10560 ; SSE-NEXT: movaps %xmm0, 112(%r9)
10561 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10562 ; SSE-NEXT: movaps %xmm0, 96(%r9)
10563 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10564 ; SSE-NEXT: movaps %xmm0, 80(%r9)
10565 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10566 ; SSE-NEXT: movaps %xmm0, 64(%r9)
10567 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10568 ; SSE-NEXT: movaps %xmm0, 48(%r9)
10569 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10570 ; SSE-NEXT: movaps %xmm0, 32(%r9)
10571 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10572 ; SSE-NEXT: movaps %xmm0, 16(%r9)
10573 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10574 ; SSE-NEXT: movaps %xmm0, (%r9)
10575 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10576 ; SSE-NEXT: movaps %xmm1, 496(%rax)
10577 ; SSE-NEXT: movaps %xmm3, 480(%rax)
10578 ; SSE-NEXT: movaps %xmm6, 464(%rax)
10579 ; SSE-NEXT: movaps %xmm5, 448(%rax)
10580 ; SSE-NEXT: movaps %xmm9, 432(%rax)
10581 ; SSE-NEXT: movaps %xmm10, 416(%rax)
10582 ; SSE-NEXT: movaps %xmm14, 400(%rax)
10583 ; SSE-NEXT: movaps %xmm12, 384(%rax)
10584 ; SSE-NEXT: movaps %xmm15, 368(%rax)
10585 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
10586 ; SSE-NEXT: movaps %xmm0, 352(%rax)
10587 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10588 ; SSE-NEXT: movaps %xmm0, 336(%rax)
10589 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10590 ; SSE-NEXT: movaps %xmm0, 320(%rax)
10591 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10592 ; SSE-NEXT: movaps %xmm0, 304(%rax)
10593 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10594 ; SSE-NEXT: movaps %xmm0, 288(%rax)
10595 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10596 ; SSE-NEXT: movaps %xmm0, 272(%rax)
10597 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10598 ; SSE-NEXT: movaps %xmm0, 256(%rax)
10599 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10600 ; SSE-NEXT: movaps %xmm0, 240(%rax)
10601 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10602 ; SSE-NEXT: movaps %xmm0, 224(%rax)
10603 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10604 ; SSE-NEXT: movaps %xmm0, 208(%rax)
10605 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10606 ; SSE-NEXT: movaps %xmm0, 192(%rax)
10607 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10608 ; SSE-NEXT: movaps %xmm0, 176(%rax)
10609 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10610 ; SSE-NEXT: movaps %xmm0, 160(%rax)
10611 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10612 ; SSE-NEXT: movaps %xmm0, 144(%rax)
10613 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10614 ; SSE-NEXT: movaps %xmm0, 128(%rax)
10615 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10616 ; SSE-NEXT: movaps %xmm0, 112(%rax)
10617 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10618 ; SSE-NEXT: movaps %xmm0, 96(%rax)
10619 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10620 ; SSE-NEXT: movaps %xmm0, 80(%rax)
10621 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10622 ; SSE-NEXT: movaps %xmm0, 64(%rax)
10623 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10624 ; SSE-NEXT: movaps %xmm0, 48(%rax)
10625 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10626 ; SSE-NEXT: movaps %xmm0, 32(%rax)
10627 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10628 ; SSE-NEXT: movaps %xmm0, 16(%rax)
10629 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10630 ; SSE-NEXT: movaps %xmm0, (%rax)
10631 ; SSE-NEXT: addq $2712, %rsp # imm = 0xA98
10634 ; AVX-LABEL: load_i64_stride6_vf64:
10636 ; AVX-NEXT: subq $3768, %rsp # imm = 0xEB8
10637 ; AVX-NEXT: vmovaps 1088(%rdi), %ymm2
10638 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10639 ; AVX-NEXT: vmovaps 704(%rdi), %ymm3
10640 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10641 ; AVX-NEXT: vmovaps 320(%rdi), %ymm1
10642 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10643 ; AVX-NEXT: vinsertf128 $1, 288(%rdi), %ymm0, %ymm0
10644 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10645 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10646 ; AVX-NEXT: vmovaps 240(%rdi), %xmm4
10647 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10648 ; AVX-NEXT: vmovaps 192(%rdi), %xmm1
10649 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10650 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
10651 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10652 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10653 ; AVX-NEXT: vinsertf128 $1, 672(%rdi), %ymm0, %ymm0
10654 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10655 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
10656 ; AVX-NEXT: vmovaps 624(%rdi), %xmm3
10657 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10658 ; AVX-NEXT: vmovaps 576(%rdi), %xmm1
10659 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10660 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
10661 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10662 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10663 ; AVX-NEXT: vinsertf128 $1, 1056(%rdi), %ymm0, %ymm0
10664 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10665 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
10666 ; AVX-NEXT: vmovaps 1008(%rdi), %xmm2
10667 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10668 ; AVX-NEXT: vmovaps 960(%rdi), %xmm1
10669 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10670 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10671 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10672 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10673 ; AVX-NEXT: vmovaps 1472(%rdi), %ymm1
10674 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10675 ; AVX-NEXT: vinsertf128 $1, 1440(%rdi), %ymm0, %ymm0
10676 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10677 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10678 ; AVX-NEXT: vmovaps 1392(%rdi), %xmm2
10679 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10680 ; AVX-NEXT: vmovaps 1344(%rdi), %xmm1
10681 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10682 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10683 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10684 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10685 ; AVX-NEXT: vmovaps 1856(%rdi), %ymm1
10686 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10687 ; AVX-NEXT: vinsertf128 $1, 1824(%rdi), %ymm0, %ymm0
10688 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10689 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10690 ; AVX-NEXT: vmovaps 1776(%rdi), %xmm2
10691 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10692 ; AVX-NEXT: vmovaps 1728(%rdi), %xmm1
10693 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10694 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10695 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10696 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10697 ; AVX-NEXT: vmovaps 2240(%rdi), %ymm1
10698 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10699 ; AVX-NEXT: vinsertf128 $1, 2208(%rdi), %ymm0, %ymm0
10700 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10701 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10702 ; AVX-NEXT: vmovaps 2160(%rdi), %xmm2
10703 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10704 ; AVX-NEXT: vmovaps 2112(%rdi), %xmm1
10705 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10706 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10707 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10708 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10709 ; AVX-NEXT: vmovaps 2624(%rdi), %ymm1
10710 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10711 ; AVX-NEXT: vinsertf128 $1, 2592(%rdi), %ymm0, %ymm0
10712 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10713 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10714 ; AVX-NEXT: vmovaps 2544(%rdi), %xmm2
10715 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10716 ; AVX-NEXT: vmovaps 2496(%rdi), %xmm1
10717 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10718 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10719 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10720 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10721 ; AVX-NEXT: vmovaps 3008(%rdi), %ymm1
10722 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10723 ; AVX-NEXT: vinsertf128 $1, 2976(%rdi), %ymm0, %ymm0
10724 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10725 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10726 ; AVX-NEXT: vmovaps 2928(%rdi), %xmm2
10727 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10728 ; AVX-NEXT: vmovaps 2880(%rdi), %xmm1
10729 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10730 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10731 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10732 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10733 ; AVX-NEXT: vmovaps 128(%rdi), %ymm1
10734 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10735 ; AVX-NEXT: vinsertf128 $1, 96(%rdi), %ymm0, %ymm0
10736 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10737 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10738 ; AVX-NEXT: vmovaps (%rdi), %xmm2
10739 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10740 ; AVX-NEXT: vmovaps 48(%rdi), %xmm1
10741 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10742 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
10743 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10744 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10745 ; AVX-NEXT: vmovaps 512(%rdi), %ymm1
10746 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10747 ; AVX-NEXT: vinsertf128 $1, 480(%rdi), %ymm0, %ymm0
10748 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10749 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10750 ; AVX-NEXT: vmovaps 432(%rdi), %xmm2
10751 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10752 ; AVX-NEXT: vmovaps 384(%rdi), %xmm1
10753 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10754 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10755 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10756 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10757 ; AVX-NEXT: vmovaps 896(%rdi), %ymm1
10758 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10759 ; AVX-NEXT: vinsertf128 $1, 864(%rdi), %ymm0, %ymm0
10760 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10761 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10762 ; AVX-NEXT: vmovaps 816(%rdi), %xmm2
10763 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10764 ; AVX-NEXT: vmovaps 768(%rdi), %xmm1
10765 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10766 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10767 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10768 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10769 ; AVX-NEXT: vmovaps 1280(%rdi), %ymm1
10770 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10771 ; AVX-NEXT: vinsertf128 $1, 1248(%rdi), %ymm0, %ymm0
10772 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10773 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10774 ; AVX-NEXT: vmovaps 1200(%rdi), %xmm2
10775 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10776 ; AVX-NEXT: vmovaps 1152(%rdi), %xmm1
10777 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10778 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10779 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10780 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10781 ; AVX-NEXT: vmovaps 1664(%rdi), %ymm1
10782 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10783 ; AVX-NEXT: vinsertf128 $1, 1632(%rdi), %ymm0, %ymm0
10784 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10785 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
10786 ; AVX-NEXT: vmovaps 1584(%rdi), %xmm14
10787 ; AVX-NEXT: vmovaps 1536(%rdi), %xmm13
10788 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
10789 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10790 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10791 ; AVX-NEXT: vmovaps 2048(%rdi), %ymm12
10792 ; AVX-NEXT: vinsertf128 $1, 2016(%rdi), %ymm0, %ymm11
10793 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
10794 ; AVX-NEXT: vmovaps 1968(%rdi), %xmm10
10795 ; AVX-NEXT: vmovaps 1920(%rdi), %xmm9
10796 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
10797 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10798 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10799 ; AVX-NEXT: vmovaps 2432(%rdi), %ymm8
10800 ; AVX-NEXT: vinsertf128 $1, 2400(%rdi), %ymm0, %ymm7
10801 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
10802 ; AVX-NEXT: vmovaps 2352(%rdi), %xmm6
10803 ; AVX-NEXT: vmovaps 2304(%rdi), %xmm5
10804 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
10805 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10806 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10807 ; AVX-NEXT: vmovaps 2816(%rdi), %ymm4
10808 ; AVX-NEXT: vinsertf128 $1, 2784(%rdi), %ymm0, %ymm3
10809 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
10810 ; AVX-NEXT: vmovaps 2736(%rdi), %xmm2
10811 ; AVX-NEXT: vmovaps 2688(%rdi), %xmm1
10812 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
10813 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10814 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10815 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10816 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10817 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10818 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10819 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10820 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10821 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10822 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10823 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10824 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10825 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10826 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10827 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10828 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10829 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10830 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10831 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10832 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10833 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10834 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10835 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10836 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10837 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10838 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10839 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10840 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10841 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10842 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10843 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10844 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10845 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10846 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10847 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10848 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10849 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10850 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10851 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10852 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10853 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10854 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10855 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10856 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10857 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10858 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10859 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10860 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10861 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10862 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10863 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10864 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10865 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10866 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10867 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10868 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10869 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10870 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10871 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10872 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10873 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10874 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10875 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10876 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10877 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10878 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10879 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10880 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10881 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10882 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10883 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10884 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10885 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10886 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10887 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10888 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10889 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10890 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10891 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10892 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10893 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10894 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10895 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10896 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10897 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10898 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10899 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10900 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10901 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10902 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10903 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10904 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10905 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10906 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10907 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
10908 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
10909 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
10910 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10911 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10912 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10913 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10914 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
10915 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
10916 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10917 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
10918 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
10919 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
10920 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10921 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
10922 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
10923 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
10924 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10925 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
10926 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
10927 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10928 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10929 ; AVX-NEXT: vmovaps 288(%rdi), %ymm1
10930 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10931 ; AVX-NEXT: vinsertf128 $1, 352(%rdi), %ymm0, %ymm0
10932 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10933 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10934 ; AVX-NEXT: vmovaps 256(%rdi), %xmm2
10935 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10936 ; AVX-NEXT: vmovaps 208(%rdi), %xmm1
10937 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10938 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10939 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10940 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10941 ; AVX-NEXT: vmovaps 672(%rdi), %ymm1
10942 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10943 ; AVX-NEXT: vinsertf128 $1, 736(%rdi), %ymm0, %ymm0
10944 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10945 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10946 ; AVX-NEXT: vmovaps 640(%rdi), %xmm2
10947 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10948 ; AVX-NEXT: vmovaps 592(%rdi), %xmm1
10949 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10950 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10951 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10952 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10953 ; AVX-NEXT: vmovaps 1056(%rdi), %ymm1
10954 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10955 ; AVX-NEXT: vinsertf128 $1, 1120(%rdi), %ymm0, %ymm0
10956 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10957 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10958 ; AVX-NEXT: vmovaps 1024(%rdi), %xmm2
10959 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10960 ; AVX-NEXT: vmovaps 976(%rdi), %xmm1
10961 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10962 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10963 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10964 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10965 ; AVX-NEXT: vmovaps 1440(%rdi), %ymm1
10966 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10967 ; AVX-NEXT: vinsertf128 $1, 1504(%rdi), %ymm0, %ymm0
10968 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10969 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10970 ; AVX-NEXT: vmovaps 1408(%rdi), %xmm2
10971 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10972 ; AVX-NEXT: vmovaps 1360(%rdi), %xmm1
10973 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10974 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10975 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10976 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10977 ; AVX-NEXT: vmovaps 1824(%rdi), %ymm1
10978 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10979 ; AVX-NEXT: vinsertf128 $1, 1888(%rdi), %ymm0, %ymm0
10980 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10981 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10982 ; AVX-NEXT: vmovaps 1792(%rdi), %xmm2
10983 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10984 ; AVX-NEXT: vmovaps 1744(%rdi), %xmm1
10985 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10986 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10987 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10988 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10989 ; AVX-NEXT: vmovaps 2208(%rdi), %ymm1
10990 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10991 ; AVX-NEXT: vinsertf128 $1, 2272(%rdi), %ymm0, %ymm0
10992 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10993 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10994 ; AVX-NEXT: vmovaps 2176(%rdi), %xmm2
10995 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10996 ; AVX-NEXT: vmovaps 2128(%rdi), %xmm1
10997 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10998 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
10999 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11000 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11001 ; AVX-NEXT: vmovaps 2592(%rdi), %ymm1
11002 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11003 ; AVX-NEXT: vinsertf128 $1, 2656(%rdi), %ymm0, %ymm0
11004 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11005 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11006 ; AVX-NEXT: vmovaps 2560(%rdi), %xmm2
11007 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11008 ; AVX-NEXT: vmovaps 2512(%rdi), %xmm1
11009 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11010 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11011 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11012 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11013 ; AVX-NEXT: vmovaps 2976(%rdi), %ymm1
11014 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11015 ; AVX-NEXT: vinsertf128 $1, 3040(%rdi), %ymm0, %ymm0
11016 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11017 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11018 ; AVX-NEXT: vmovaps 2944(%rdi), %xmm2
11019 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11020 ; AVX-NEXT: vmovaps 2896(%rdi), %xmm1
11021 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11022 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11023 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11024 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11025 ; AVX-NEXT: vmovaps 2784(%rdi), %ymm1
11026 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11027 ; AVX-NEXT: vinsertf128 $1, 2848(%rdi), %ymm0, %ymm0
11028 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11029 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11030 ; AVX-NEXT: vmovaps 2752(%rdi), %xmm2
11031 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11032 ; AVX-NEXT: vmovaps 2704(%rdi), %xmm1
11033 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11034 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11035 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11036 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11037 ; AVX-NEXT: vmovaps 2400(%rdi), %ymm1
11038 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11039 ; AVX-NEXT: vinsertf128 $1, 2464(%rdi), %ymm0, %ymm0
11040 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11041 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11042 ; AVX-NEXT: vmovaps 2368(%rdi), %xmm2
11043 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11044 ; AVX-NEXT: vmovaps 2320(%rdi), %xmm1
11045 ; AVX-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
11046 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11047 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11048 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11049 ; AVX-NEXT: vmovaps 2016(%rdi), %ymm1
11050 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11051 ; AVX-NEXT: vinsertf128 $1, 2080(%rdi), %ymm0, %ymm0
11052 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11053 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11054 ; AVX-NEXT: vmovaps 1984(%rdi), %xmm2
11055 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11056 ; AVX-NEXT: vmovaps 1936(%rdi), %xmm1
11057 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11058 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11059 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11060 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11061 ; AVX-NEXT: vmovaps 1632(%rdi), %ymm1
11062 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11063 ; AVX-NEXT: vinsertf128 $1, 1696(%rdi), %ymm0, %ymm0
11064 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11065 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11066 ; AVX-NEXT: vmovaps 1600(%rdi), %xmm2
11067 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11068 ; AVX-NEXT: vmovaps 1552(%rdi), %xmm1
11069 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11070 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11071 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11072 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11073 ; AVX-NEXT: vmovaps 1248(%rdi), %ymm1
11074 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11075 ; AVX-NEXT: vinsertf128 $1, 1312(%rdi), %ymm0, %ymm0
11076 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11077 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11078 ; AVX-NEXT: vmovaps 1216(%rdi), %xmm12
11079 ; AVX-NEXT: vmovaps 1168(%rdi), %xmm11
11080 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
11081 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11082 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11083 ; AVX-NEXT: vmovaps 864(%rdi), %ymm10
11084 ; AVX-NEXT: vinsertf128 $1, 928(%rdi), %ymm0, %ymm9
11085 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm9[0],ymm10[2],ymm9[2]
11086 ; AVX-NEXT: vmovaps 832(%rdi), %xmm8
11087 ; AVX-NEXT: vmovaps 784(%rdi), %xmm7
11088 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm7[0],xmm8[0]
11089 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11090 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11091 ; AVX-NEXT: vmovaps 480(%rdi), %ymm6
11092 ; AVX-NEXT: vinsertf128 $1, 544(%rdi), %ymm0, %ymm15
11093 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm6[0],ymm15[0],ymm6[2],ymm15[2]
11094 ; AVX-NEXT: vmovaps 448(%rdi), %xmm5
11095 ; AVX-NEXT: vmovaps 400(%rdi), %xmm4
11096 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm4[0],xmm5[0]
11097 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
11098 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11099 ; AVX-NEXT: vmovaps 96(%rdi), %ymm2
11100 ; AVX-NEXT: vmovaps 16(%rdi), %xmm14
11101 ; AVX-NEXT: vinsertf128 $1, 160(%rdi), %ymm0, %ymm1
11102 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
11103 ; AVX-NEXT: vmovaps 64(%rdi), %xmm0
11104 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm14[0],xmm0[0]
11105 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm13[0,1,2,3],ymm3[4,5,6,7]
11106 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11107 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
11108 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm14[1],xmm0[1]
11109 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11110 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11111 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11112 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11113 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11114 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11115 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11116 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11117 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11118 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11119 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm6[1],ymm15[1],ymm6[3],ymm15[3]
11120 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm5[1]
11121 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11122 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11123 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11124 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11125 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11126 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11127 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11128 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11129 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11130 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11131 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm10[1],ymm9[1],ymm10[3],ymm9[3]
11132 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm7[1],xmm8[1]
11133 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11134 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11135 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11136 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11137 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11138 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11139 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11140 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11141 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11142 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11143 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11144 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11145 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11146 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm12[1]
11147 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11148 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11149 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11150 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11151 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11152 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11153 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11154 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11155 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11156 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11157 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11158 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11159 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11160 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11161 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11162 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11163 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11164 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11165 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11166 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11167 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11168 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11169 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11170 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11171 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11172 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11173 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11174 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11175 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11176 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11177 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11178 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11179 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11180 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11181 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11182 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11183 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11184 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11185 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11186 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11187 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11188 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11189 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11190 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11191 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11192 ; AVX-NEXT: vmovaps (%rsp), %xmm1 # 16-byte Reload
11193 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11194 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11195 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11196 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11197 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11198 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11199 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11200 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11201 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11202 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11203 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11204 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11205 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11206 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11207 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11208 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11209 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11210 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11211 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11212 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11213 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11214 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11215 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11216 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11217 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11218 ; AVX-NEXT: # xmm1 = xmm1[1],mem[1]
11219 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11220 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11221 ; AVX-NEXT: vmovaps 160(%rdi), %ymm1
11222 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11223 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
11224 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11225 ; AVX-NEXT: vinsertf128 $1, 128(%rdi), %ymm0, %ymm0
11226 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11227 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11228 ; AVX-NEXT: vmovaps 80(%rdi), %xmm1
11229 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11230 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
11231 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11232 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11233 ; AVX-NEXT: vmovaps 352(%rdi), %ymm1
11234 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11235 ; AVX-NEXT: vinsertf128 $1, 320(%rdi), %ymm0, %ymm0
11236 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11237 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11238 ; AVX-NEXT: vmovaps 272(%rdi), %xmm2
11239 ; AVX-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
11240 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
11241 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11242 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11243 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11244 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11245 ; AVX-NEXT: vmovaps 544(%rdi), %ymm1
11246 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11247 ; AVX-NEXT: vinsertf128 $1, 512(%rdi), %ymm0, %ymm0
11248 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11249 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11250 ; AVX-NEXT: vmovaps 464(%rdi), %xmm2
11251 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11252 ; AVX-NEXT: vmovaps 416(%rdi), %xmm1
11253 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11254 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11255 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11256 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11257 ; AVX-NEXT: vmovaps 736(%rdi), %ymm1
11258 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11259 ; AVX-NEXT: vinsertf128 $1, 704(%rdi), %ymm0, %ymm0
11260 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11261 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11262 ; AVX-NEXT: vmovaps 656(%rdi), %xmm2
11263 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11264 ; AVX-NEXT: vmovaps 608(%rdi), %xmm1
11265 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11266 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11267 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11268 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11269 ; AVX-NEXT: vmovaps 928(%rdi), %ymm1
11270 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11271 ; AVX-NEXT: vinsertf128 $1, 896(%rdi), %ymm0, %ymm0
11272 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11273 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11274 ; AVX-NEXT: vmovaps 848(%rdi), %xmm2
11275 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11276 ; AVX-NEXT: vmovaps 800(%rdi), %xmm1
11277 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11278 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11279 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11280 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11281 ; AVX-NEXT: vmovaps 1120(%rdi), %ymm1
11282 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11283 ; AVX-NEXT: vinsertf128 $1, 1088(%rdi), %ymm0, %ymm0
11284 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11285 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11286 ; AVX-NEXT: vmovaps 1040(%rdi), %xmm2
11287 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11288 ; AVX-NEXT: vmovaps 992(%rdi), %xmm1
11289 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11290 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11291 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11292 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11293 ; AVX-NEXT: vmovaps 1312(%rdi), %ymm1
11294 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11295 ; AVX-NEXT: vinsertf128 $1, 1280(%rdi), %ymm0, %ymm0
11296 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11297 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11298 ; AVX-NEXT: vmovaps 1232(%rdi), %xmm2
11299 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11300 ; AVX-NEXT: vmovaps 1184(%rdi), %xmm1
11301 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11302 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11303 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11304 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11305 ; AVX-NEXT: vmovaps 1504(%rdi), %ymm1
11306 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11307 ; AVX-NEXT: vinsertf128 $1, 1472(%rdi), %ymm0, %ymm0
11308 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11309 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11310 ; AVX-NEXT: vmovaps 1424(%rdi), %xmm2
11311 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11312 ; AVX-NEXT: vmovaps 1376(%rdi), %xmm1
11313 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11314 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11315 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11316 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11317 ; AVX-NEXT: vmovaps 1696(%rdi), %ymm1
11318 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11319 ; AVX-NEXT: vinsertf128 $1, 1664(%rdi), %ymm0, %ymm0
11320 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11321 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11322 ; AVX-NEXT: vmovaps 1616(%rdi), %xmm2
11323 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11324 ; AVX-NEXT: vmovaps 1568(%rdi), %xmm1
11325 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11326 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11327 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11328 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11329 ; AVX-NEXT: vmovaps 1888(%rdi), %ymm1
11330 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11331 ; AVX-NEXT: vinsertf128 $1, 1856(%rdi), %ymm0, %ymm0
11332 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11333 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11334 ; AVX-NEXT: vmovaps 1808(%rdi), %xmm2
11335 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11336 ; AVX-NEXT: vmovaps 1760(%rdi), %xmm1
11337 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11338 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11339 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11340 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11341 ; AVX-NEXT: vmovaps 2080(%rdi), %ymm1
11342 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11343 ; AVX-NEXT: vinsertf128 $1, 2048(%rdi), %ymm0, %ymm0
11344 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11345 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11346 ; AVX-NEXT: vmovaps 2000(%rdi), %xmm2
11347 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11348 ; AVX-NEXT: vmovaps 1952(%rdi), %xmm1
11349 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11350 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11351 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11352 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11353 ; AVX-NEXT: vmovaps 2272(%rdi), %ymm1
11354 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11355 ; AVX-NEXT: vinsertf128 $1, 2240(%rdi), %ymm0, %ymm0
11356 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11357 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11358 ; AVX-NEXT: vmovaps 2192(%rdi), %xmm2
11359 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11360 ; AVX-NEXT: vmovaps 2144(%rdi), %xmm1
11361 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11362 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11363 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11364 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11365 ; AVX-NEXT: vmovaps 2464(%rdi), %ymm1
11366 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11367 ; AVX-NEXT: vinsertf128 $1, 2432(%rdi), %ymm0, %ymm0
11368 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11369 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11370 ; AVX-NEXT: vmovaps 2384(%rdi), %xmm14
11371 ; AVX-NEXT: vmovaps 2336(%rdi), %xmm13
11372 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm14[0]
11373 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11374 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11375 ; AVX-NEXT: vmovaps 2656(%rdi), %ymm12
11376 ; AVX-NEXT: vinsertf128 $1, 2624(%rdi), %ymm0, %ymm11
11377 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[2],ymm12[2]
11378 ; AVX-NEXT: vmovaps 2576(%rdi), %xmm10
11379 ; AVX-NEXT: vmovaps 2528(%rdi), %xmm9
11380 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm10[0]
11381 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11382 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11383 ; AVX-NEXT: vmovaps 2848(%rdi), %ymm8
11384 ; AVX-NEXT: vinsertf128 $1, 2816(%rdi), %ymm0, %ymm7
11385 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
11386 ; AVX-NEXT: vmovaps 2768(%rdi), %xmm6
11387 ; AVX-NEXT: vmovaps 2720(%rdi), %xmm5
11388 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm6[0]
11389 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11390 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11391 ; AVX-NEXT: vmovaps 3040(%rdi), %ymm4
11392 ; AVX-NEXT: vinsertf128 $1, 3008(%rdi), %ymm0, %ymm3
11393 ; AVX-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
11394 ; AVX-NEXT: vmovaps 2960(%rdi), %xmm2
11395 ; AVX-NEXT: vmovaps 2912(%rdi), %xmm1
11396 ; AVX-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
11397 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11398 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11399 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11400 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11401 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11402 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11403 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11404 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11405 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11406 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11407 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11408 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11409 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11410 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11411 ; AVX-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
11412 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11413 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11414 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11415 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11416 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11417 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11418 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11419 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11420 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11421 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11422 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11423 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11424 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11425 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11426 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11427 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11428 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11429 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11430 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11431 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11432 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11433 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11434 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11435 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11436 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11437 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11438 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11439 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11440 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11441 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11442 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11443 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11444 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11445 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11446 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11447 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11448 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11449 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11450 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11451 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11452 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11453 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11454 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11455 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11456 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11457 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11458 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11459 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11460 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11461 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11462 ; AVX-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
11463 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11464 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11465 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11466 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11467 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11468 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11469 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11470 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11471 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11472 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11473 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11474 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11475 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11476 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11477 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11478 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11479 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11480 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11481 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11482 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11483 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11484 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11485 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11486 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11487 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11488 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11489 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11490 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11491 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
11492 ; AVX-NEXT: # xmm15 = xmm15[1],mem[1]
11493 ; AVX-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11494 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11495 ; AVX-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11496 ; AVX-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11497 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm13 = xmm13[1],xmm14[1]
11498 ; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
11499 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm11[1],ymm12[1],ymm11[3],ymm12[3]
11500 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
11501 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
11502 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
11503 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
11504 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
11505 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
11506 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11507 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11508 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11509 ; AVX-NEXT: vmovaps %ymm1, 448(%rsi)
11510 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11511 ; AVX-NEXT: vmovaps %ymm1, 384(%rsi)
11512 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11513 ; AVX-NEXT: vmovaps %ymm1, 320(%rsi)
11514 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11515 ; AVX-NEXT: vmovaps %ymm1, 256(%rsi)
11516 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11517 ; AVX-NEXT: vmovaps %ymm1, 192(%rsi)
11518 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11519 ; AVX-NEXT: vmovaps %ymm1, 128(%rsi)
11520 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11521 ; AVX-NEXT: vmovaps %ymm1, 64(%rsi)
11522 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11523 ; AVX-NEXT: vmovaps %ymm1, (%rsi)
11524 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11525 ; AVX-NEXT: vmovaps %ymm1, 480(%rsi)
11526 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11527 ; AVX-NEXT: vmovaps %ymm1, 416(%rsi)
11528 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11529 ; AVX-NEXT: vmovaps %ymm1, 352(%rsi)
11530 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11531 ; AVX-NEXT: vmovaps %ymm1, 288(%rsi)
11532 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11533 ; AVX-NEXT: vmovaps %ymm1, 224(%rsi)
11534 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11535 ; AVX-NEXT: vmovaps %ymm1, 160(%rsi)
11536 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11537 ; AVX-NEXT: vmovaps %ymm1, 96(%rsi)
11538 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11539 ; AVX-NEXT: vmovaps %ymm1, 32(%rsi)
11540 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11541 ; AVX-NEXT: vmovaps %ymm1, 448(%rdx)
11542 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11543 ; AVX-NEXT: vmovaps %ymm1, 384(%rdx)
11544 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11545 ; AVX-NEXT: vmovaps %ymm1, 320(%rdx)
11546 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11547 ; AVX-NEXT: vmovaps %ymm1, 256(%rdx)
11548 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11549 ; AVX-NEXT: vmovaps %ymm1, 192(%rdx)
11550 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11551 ; AVX-NEXT: vmovaps %ymm1, 128(%rdx)
11552 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11553 ; AVX-NEXT: vmovaps %ymm1, 64(%rdx)
11554 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11555 ; AVX-NEXT: vmovaps %ymm1, (%rdx)
11556 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11557 ; AVX-NEXT: vmovaps %ymm1, 480(%rdx)
11558 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11559 ; AVX-NEXT: vmovaps %ymm1, 416(%rdx)
11560 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11561 ; AVX-NEXT: vmovaps %ymm1, 352(%rdx)
11562 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11563 ; AVX-NEXT: vmovaps %ymm1, 288(%rdx)
11564 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11565 ; AVX-NEXT: vmovaps %ymm1, 224(%rdx)
11566 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11567 ; AVX-NEXT: vmovaps %ymm1, 160(%rdx)
11568 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11569 ; AVX-NEXT: vmovaps %ymm1, 96(%rdx)
11570 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11571 ; AVX-NEXT: vmovaps %ymm1, 32(%rdx)
11572 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11573 ; AVX-NEXT: vmovaps %ymm1, (%rcx)
11574 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11575 ; AVX-NEXT: vmovaps %ymm1, 64(%rcx)
11576 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11577 ; AVX-NEXT: vmovaps %ymm1, 128(%rcx)
11578 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11579 ; AVX-NEXT: vmovaps %ymm1, 192(%rcx)
11580 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11581 ; AVX-NEXT: vmovaps %ymm1, 256(%rcx)
11582 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11583 ; AVX-NEXT: vmovaps %ymm1, 320(%rcx)
11584 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11585 ; AVX-NEXT: vmovaps %ymm1, 384(%rcx)
11586 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11587 ; AVX-NEXT: vmovaps %ymm1, 448(%rcx)
11588 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11589 ; AVX-NEXT: vmovaps %ymm1, 480(%rcx)
11590 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11591 ; AVX-NEXT: vmovaps %ymm1, 416(%rcx)
11592 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11593 ; AVX-NEXT: vmovaps %ymm1, 352(%rcx)
11594 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11595 ; AVX-NEXT: vmovaps %ymm1, 288(%rcx)
11596 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11597 ; AVX-NEXT: vmovaps %ymm1, 224(%rcx)
11598 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11599 ; AVX-NEXT: vmovaps %ymm1, 160(%rcx)
11600 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11601 ; AVX-NEXT: vmovaps %ymm1, 96(%rcx)
11602 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11603 ; AVX-NEXT: vmovaps %ymm1, 32(%rcx)
11604 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11605 ; AVX-NEXT: vmovaps %ymm1, 480(%r8)
11606 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11607 ; AVX-NEXT: vmovaps %ymm1, 448(%r8)
11608 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11609 ; AVX-NEXT: vmovaps %ymm1, 416(%r8)
11610 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11611 ; AVX-NEXT: vmovaps %ymm1, 384(%r8)
11612 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11613 ; AVX-NEXT: vmovaps %ymm1, 352(%r8)
11614 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11615 ; AVX-NEXT: vmovaps %ymm1, 320(%r8)
11616 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11617 ; AVX-NEXT: vmovaps %ymm1, 288(%r8)
11618 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11619 ; AVX-NEXT: vmovaps %ymm1, 256(%r8)
11620 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11621 ; AVX-NEXT: vmovaps %ymm1, 224(%r8)
11622 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11623 ; AVX-NEXT: vmovaps %ymm1, 192(%r8)
11624 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11625 ; AVX-NEXT: vmovaps %ymm1, 160(%r8)
11626 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11627 ; AVX-NEXT: vmovaps %ymm1, 128(%r8)
11628 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11629 ; AVX-NEXT: vmovaps %ymm1, 96(%r8)
11630 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11631 ; AVX-NEXT: vmovaps %ymm1, 64(%r8)
11632 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11633 ; AVX-NEXT: vmovaps %ymm1, 32(%r8)
11634 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11635 ; AVX-NEXT: vmovaps %ymm1, (%r8)
11636 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11637 ; AVX-NEXT: vmovaps %ymm1, 480(%r9)
11638 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11639 ; AVX-NEXT: vmovaps %ymm1, 448(%r9)
11640 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11641 ; AVX-NEXT: vmovaps %ymm1, 416(%r9)
11642 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11643 ; AVX-NEXT: vmovaps %ymm1, 384(%r9)
11644 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11645 ; AVX-NEXT: vmovaps %ymm1, 352(%r9)
11646 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11647 ; AVX-NEXT: vmovaps %ymm1, 320(%r9)
11648 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11649 ; AVX-NEXT: vmovaps %ymm1, 288(%r9)
11650 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11651 ; AVX-NEXT: vmovaps %ymm1, 256(%r9)
11652 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11653 ; AVX-NEXT: vmovaps %ymm1, 224(%r9)
11654 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11655 ; AVX-NEXT: vmovaps %ymm1, 192(%r9)
11656 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11657 ; AVX-NEXT: vmovaps %ymm1, 160(%r9)
11658 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11659 ; AVX-NEXT: vmovaps %ymm1, 128(%r9)
11660 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11661 ; AVX-NEXT: vmovaps %ymm1, 96(%r9)
11662 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11663 ; AVX-NEXT: vmovaps %ymm1, 64(%r9)
11664 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11665 ; AVX-NEXT: vmovaps %ymm1, 32(%r9)
11666 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11667 ; AVX-NEXT: vmovaps %ymm1, (%r9)
11668 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
11669 ; AVX-NEXT: vmovaps %ymm4, 480(%rax)
11670 ; AVX-NEXT: vmovaps %ymm5, 448(%rax)
11671 ; AVX-NEXT: vmovaps %ymm0, 416(%rax)
11672 ; AVX-NEXT: vmovaps %ymm13, 384(%rax)
11673 ; AVX-NEXT: vmovaps %ymm15, 352(%rax)
11674 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11675 ; AVX-NEXT: vmovaps %ymm0, 320(%rax)
11676 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11677 ; AVX-NEXT: vmovaps %ymm0, 288(%rax)
11678 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11679 ; AVX-NEXT: vmovaps %ymm0, 256(%rax)
11680 ; AVX-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
11681 ; AVX-NEXT: vmovaps %ymm0, 224(%rax)
11682 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11683 ; AVX-NEXT: vmovaps %ymm0, 192(%rax)
11684 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11685 ; AVX-NEXT: vmovaps %ymm0, 160(%rax)
11686 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11687 ; AVX-NEXT: vmovaps %ymm0, 128(%rax)
11688 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11689 ; AVX-NEXT: vmovaps %ymm0, 96(%rax)
11690 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11691 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
11692 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11693 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
11694 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11695 ; AVX-NEXT: vmovaps %ymm0, (%rax)
11696 ; AVX-NEXT: addq $3768, %rsp # imm = 0xEB8
11697 ; AVX-NEXT: vzeroupper
11700 ; AVX2-LABEL: load_i64_stride6_vf64:
11702 ; AVX2-NEXT: subq $3432, %rsp # imm = 0xD68
11703 ; AVX2-NEXT: vmovaps 1088(%rdi), %ymm2
11704 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11705 ; AVX2-NEXT: vmovaps 1056(%rdi), %ymm4
11706 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11707 ; AVX2-NEXT: vmovaps 704(%rdi), %ymm3
11708 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11709 ; AVX2-NEXT: vmovaps 672(%rdi), %ymm5
11710 ; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11711 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm1
11712 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11713 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm6
11714 ; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11715 ; AVX2-NEXT: vmovaps 240(%rdi), %xmm7
11716 ; AVX2-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11717 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm0
11718 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11719 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
11720 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
11721 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11722 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11723 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11724 ; AVX2-NEXT: vmovaps 624(%rdi), %xmm1
11725 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11726 ; AVX2-NEXT: vmovaps 576(%rdi), %xmm0
11727 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11728 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
11729 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
11730 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11731 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11732 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11733 ; AVX2-NEXT: vmovaps 1008(%rdi), %xmm1
11734 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11735 ; AVX2-NEXT: vmovaps 960(%rdi), %xmm0
11736 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11737 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
11738 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
11739 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
11740 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11741 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11742 ; AVX2-NEXT: vmovaps 1472(%rdi), %ymm0
11743 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11744 ; AVX2-NEXT: vmovaps 1440(%rdi), %ymm1
11745 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11746 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11747 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11748 ; AVX2-NEXT: vmovaps 1392(%rdi), %xmm2
11749 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11750 ; AVX2-NEXT: vmovaps 1344(%rdi), %xmm1
11751 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11752 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11753 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11754 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11755 ; AVX2-NEXT: vmovaps 1856(%rdi), %ymm0
11756 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11757 ; AVX2-NEXT: vmovaps 1824(%rdi), %ymm1
11758 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11759 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11760 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11761 ; AVX2-NEXT: vmovaps 1776(%rdi), %xmm2
11762 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11763 ; AVX2-NEXT: vmovaps 1728(%rdi), %xmm1
11764 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11765 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11766 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11767 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11768 ; AVX2-NEXT: vmovaps 2240(%rdi), %ymm0
11769 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11770 ; AVX2-NEXT: vmovaps 2208(%rdi), %ymm1
11771 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11772 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11773 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11774 ; AVX2-NEXT: vmovaps 2160(%rdi), %xmm2
11775 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11776 ; AVX2-NEXT: vmovaps 2112(%rdi), %xmm1
11777 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11778 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11779 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11780 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11781 ; AVX2-NEXT: vmovaps 2624(%rdi), %ymm0
11782 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11783 ; AVX2-NEXT: vmovaps 2592(%rdi), %ymm1
11784 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11785 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11786 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11787 ; AVX2-NEXT: vmovaps 2544(%rdi), %xmm2
11788 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11789 ; AVX2-NEXT: vmovaps 2496(%rdi), %xmm1
11790 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11791 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11792 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11793 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11794 ; AVX2-NEXT: vmovaps 3008(%rdi), %ymm0
11795 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11796 ; AVX2-NEXT: vmovaps 2976(%rdi), %ymm1
11797 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11798 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11799 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11800 ; AVX2-NEXT: vmovaps 2928(%rdi), %xmm2
11801 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11802 ; AVX2-NEXT: vmovaps 2880(%rdi), %xmm1
11803 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11804 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
11805 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11806 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11807 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm0
11808 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11809 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm1
11810 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11811 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11812 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11813 ; AVX2-NEXT: vmovaps (%rdi), %xmm2
11814 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11815 ; AVX2-NEXT: vmovaps 48(%rdi), %xmm1
11816 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11817 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
11818 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11819 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11820 ; AVX2-NEXT: vmovaps 512(%rdi), %ymm0
11821 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11822 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm1
11823 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11824 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11825 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11826 ; AVX2-NEXT: vmovaps 432(%rdi), %xmm1
11827 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11828 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm13
11829 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
11830 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11831 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11832 ; AVX2-NEXT: vmovaps 896(%rdi), %ymm0
11833 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11834 ; AVX2-NEXT: vmovaps 864(%rdi), %ymm1
11835 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11836 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11837 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11838 ; AVX2-NEXT: vmovaps 816(%rdi), %xmm1
11839 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11840 ; AVX2-NEXT: vmovaps 768(%rdi), %xmm11
11841 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
11842 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11843 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11844 ; AVX2-NEXT: vmovaps 1280(%rdi), %ymm0
11845 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11846 ; AVX2-NEXT: vmovaps 1248(%rdi), %ymm1
11847 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11848 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11849 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11850 ; AVX2-NEXT: vmovaps 1200(%rdi), %xmm1
11851 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11852 ; AVX2-NEXT: vmovaps 1152(%rdi), %xmm9
11853 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
11854 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11855 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11856 ; AVX2-NEXT: vmovaps 1664(%rdi), %ymm0
11857 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11858 ; AVX2-NEXT: vmovaps 1632(%rdi), %ymm1
11859 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11860 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11861 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11862 ; AVX2-NEXT: vmovaps 1584(%rdi), %xmm14
11863 ; AVX2-NEXT: vmovaps 1536(%rdi), %xmm7
11864 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
11865 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11866 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11867 ; AVX2-NEXT: vmovaps 2048(%rdi), %ymm12
11868 ; AVX2-NEXT: vmovaps 2016(%rdi), %ymm0
11869 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11870 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
11871 ; AVX2-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11872 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11873 ; AVX2-NEXT: vmovaps 1968(%rdi), %xmm10
11874 ; AVX2-NEXT: vmovaps 1920(%rdi), %xmm5
11875 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
11876 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11877 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11878 ; AVX2-NEXT: vmovaps 2432(%rdi), %ymm8
11879 ; AVX2-NEXT: vmovaps 2400(%rdi), %ymm0
11880 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11881 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
11882 ; AVX2-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11883 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11884 ; AVX2-NEXT: vmovaps 2352(%rdi), %xmm6
11885 ; AVX2-NEXT: vmovaps 2304(%rdi), %xmm3
11886 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
11887 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11888 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11889 ; AVX2-NEXT: vmovaps 2816(%rdi), %ymm4
11890 ; AVX2-NEXT: vmovaps 2784(%rdi), %ymm0
11891 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11892 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
11893 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11894 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
11895 ; AVX2-NEXT: vmovaps 2736(%rdi), %xmm2
11896 ; AVX2-NEXT: vmovaps 2688(%rdi), %xmm1
11897 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
11898 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
11899 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11900 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11901 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11902 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11903 ; AVX2-NEXT: vbroadcastsd 296(%rdi), %ymm15
11904 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11905 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11906 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11907 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11908 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11909 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11910 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11911 ; AVX2-NEXT: vbroadcastsd 680(%rdi), %ymm15
11912 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11913 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11914 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11915 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11916 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11917 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11918 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11919 ; AVX2-NEXT: vbroadcastsd 1064(%rdi), %ymm15
11920 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11921 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11922 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11923 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11924 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11925 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11926 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11927 ; AVX2-NEXT: vbroadcastsd 1448(%rdi), %ymm15
11928 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11929 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11930 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11931 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11932 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11933 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11934 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11935 ; AVX2-NEXT: vbroadcastsd 1832(%rdi), %ymm15
11936 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11937 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11938 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11939 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11940 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11941 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11942 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11943 ; AVX2-NEXT: vbroadcastsd 2216(%rdi), %ymm15
11944 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11945 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11946 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11947 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11948 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11949 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11950 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11951 ; AVX2-NEXT: vbroadcastsd 2600(%rdi), %ymm15
11952 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11953 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11954 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11955 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11956 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11957 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11958 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11959 ; AVX2-NEXT: vbroadcastsd 2984(%rdi), %ymm15
11960 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11961 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11962 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11963 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11964 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11965 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11966 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
11967 ; AVX2-NEXT: vbroadcastsd 104(%rdi), %ymm15
11968 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
11969 ; AVX2-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
11970 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
11971 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11972 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
11973 ; AVX2-NEXT: # xmm0 = xmm13[1],mem[1]
11974 ; AVX2-NEXT: vbroadcastsd 488(%rdi), %ymm13
11975 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
11976 ; AVX2-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
11977 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
11978 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11979 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
11980 ; AVX2-NEXT: # xmm0 = xmm11[1],mem[1]
11981 ; AVX2-NEXT: vbroadcastsd 872(%rdi), %ymm11
11982 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
11983 ; AVX2-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
11984 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
11985 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11986 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
11987 ; AVX2-NEXT: # xmm0 = xmm9[1],mem[1]
11988 ; AVX2-NEXT: vbroadcastsd 1256(%rdi), %ymm9
11989 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
11990 ; AVX2-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
11991 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
11992 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11993 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
11994 ; AVX2-NEXT: vbroadcastsd 1640(%rdi), %ymm7
11995 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
11996 ; AVX2-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
11997 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
11998 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11999 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
12000 ; AVX2-NEXT: vbroadcastsd 2024(%rdi), %ymm5
12001 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
12002 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
12003 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12004 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
12005 ; AVX2-NEXT: vbroadcastsd 2408(%rdi), %ymm3
12006 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
12007 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
12008 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12009 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
12010 ; AVX2-NEXT: vbroadcastsd 2792(%rdi), %ymm1
12011 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
12012 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12013 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12014 ; AVX2-NEXT: vbroadcastsd 352(%rdi), %ymm0
12015 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12016 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12017 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm2
12018 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12019 ; AVX2-NEXT: vmovaps 208(%rdi), %xmm1
12020 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12021 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12022 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12023 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12024 ; AVX2-NEXT: vbroadcastsd 736(%rdi), %ymm0
12025 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12026 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12027 ; AVX2-NEXT: vmovaps 640(%rdi), %xmm2
12028 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12029 ; AVX2-NEXT: vmovaps 592(%rdi), %xmm1
12030 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12031 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12032 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12033 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12034 ; AVX2-NEXT: vbroadcastsd 1120(%rdi), %ymm0
12035 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12036 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12037 ; AVX2-NEXT: vmovaps 1024(%rdi), %xmm2
12038 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12039 ; AVX2-NEXT: vmovaps 976(%rdi), %xmm1
12040 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12041 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12042 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12043 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12044 ; AVX2-NEXT: vbroadcastsd 1504(%rdi), %ymm0
12045 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12046 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12047 ; AVX2-NEXT: vmovaps 1408(%rdi), %xmm2
12048 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12049 ; AVX2-NEXT: vmovaps 1360(%rdi), %xmm1
12050 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12051 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12052 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12053 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12054 ; AVX2-NEXT: vbroadcastsd 1888(%rdi), %ymm0
12055 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12056 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12057 ; AVX2-NEXT: vmovaps 1792(%rdi), %xmm2
12058 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12059 ; AVX2-NEXT: vmovaps 1744(%rdi), %xmm1
12060 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12061 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12062 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12063 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12064 ; AVX2-NEXT: vbroadcastsd 2272(%rdi), %ymm0
12065 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12066 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12067 ; AVX2-NEXT: vmovaps 2176(%rdi), %xmm2
12068 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12069 ; AVX2-NEXT: vmovaps 2128(%rdi), %xmm1
12070 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12071 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12072 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12073 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12074 ; AVX2-NEXT: vbroadcastsd 2656(%rdi), %ymm0
12075 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12076 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12077 ; AVX2-NEXT: vmovaps 2560(%rdi), %xmm2
12078 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12079 ; AVX2-NEXT: vmovaps 2512(%rdi), %xmm1
12080 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12081 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12082 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12083 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12084 ; AVX2-NEXT: vbroadcastsd 3040(%rdi), %ymm0
12085 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12086 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12087 ; AVX2-NEXT: vmovaps 2944(%rdi), %xmm2
12088 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12089 ; AVX2-NEXT: vmovaps 2896(%rdi), %xmm1
12090 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12091 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12092 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12093 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12094 ; AVX2-NEXT: vbroadcastsd 2848(%rdi), %ymm0
12095 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12096 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12097 ; AVX2-NEXT: vmovaps 2752(%rdi), %xmm2
12098 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12099 ; AVX2-NEXT: vmovaps 2704(%rdi), %xmm1
12100 ; AVX2-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
12101 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12102 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12103 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12104 ; AVX2-NEXT: vbroadcastsd 2464(%rdi), %ymm0
12105 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12106 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12107 ; AVX2-NEXT: vmovaps 2368(%rdi), %xmm2
12108 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12109 ; AVX2-NEXT: vmovaps 2320(%rdi), %xmm1
12110 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12111 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12112 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12113 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12114 ; AVX2-NEXT: vbroadcastsd 2080(%rdi), %ymm0
12115 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12116 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12117 ; AVX2-NEXT: vmovaps 1984(%rdi), %xmm2
12118 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12119 ; AVX2-NEXT: vmovaps 1936(%rdi), %xmm1
12120 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12121 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12122 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12123 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12124 ; AVX2-NEXT: vbroadcastsd 1696(%rdi), %ymm0
12125 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12126 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12127 ; AVX2-NEXT: vmovaps 1600(%rdi), %xmm12
12128 ; AVX2-NEXT: vmovaps 1552(%rdi), %xmm11
12129 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
12130 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12131 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12132 ; AVX2-NEXT: vbroadcastsd 1312(%rdi), %ymm0
12133 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
12134 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
12135 ; AVX2-NEXT: vmovaps 1216(%rdi), %xmm9
12136 ; AVX2-NEXT: vmovaps 1168(%rdi), %xmm8
12137 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
12138 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12139 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12140 ; AVX2-NEXT: vbroadcastsd 928(%rdi), %ymm0
12141 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
12142 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
12143 ; AVX2-NEXT: vmovaps 832(%rdi), %xmm6
12144 ; AVX2-NEXT: vmovaps 784(%rdi), %xmm5
12145 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
12146 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
12147 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12148 ; AVX2-NEXT: vbroadcastsd 544(%rdi), %ymm0
12149 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
12150 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
12151 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm13
12152 ; AVX2-NEXT: vmovaps 400(%rdi), %xmm3
12153 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
12154 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12155 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12156 ; AVX2-NEXT: vbroadcastsd 160(%rdi), %ymm0
12157 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
12158 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
12159 ; AVX2-NEXT: vmovaps 16(%rdi), %xmm15
12160 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm0
12161 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
12162 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
12163 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12164 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
12165 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm14
12166 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
12167 ; AVX2-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12168 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12169 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12170 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12171 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12172 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12173 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12174 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm2
12175 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12176 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12177 ; AVX2-NEXT: vmovaps %ymm2, %ymm15
12178 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12179 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12180 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12181 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12182 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
12183 ; AVX2-NEXT: vmovaps 544(%rdi), %ymm2
12184 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm2[1],ymm4[3],ymm2[3]
12185 ; AVX2-NEXT: vmovaps %ymm2, %ymm3
12186 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12187 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12188 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12189 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12190 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12191 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12192 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12193 ; AVX2-NEXT: vmovaps 736(%rdi), %ymm2
12194 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12195 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12196 ; AVX2-NEXT: vmovaps %ymm2, %ymm4
12197 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12198 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12199 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12200 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12201 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
12202 ; AVX2-NEXT: vmovaps 928(%rdi), %ymm2
12203 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm2[1],ymm7[3],ymm2[3]
12204 ; AVX2-NEXT: vmovaps %ymm2, %ymm5
12205 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12206 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12207 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12208 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12209 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12210 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12211 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12212 ; AVX2-NEXT: vmovaps 1120(%rdi), %ymm2
12213 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12214 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12215 ; AVX2-NEXT: vmovaps %ymm2, %ymm6
12216 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12217 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12218 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12219 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12220 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
12221 ; AVX2-NEXT: vmovaps 1312(%rdi), %ymm2
12222 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm2[1],ymm10[3],ymm2[3]
12223 ; AVX2-NEXT: vmovaps %ymm2, %ymm7
12224 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12225 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12226 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12227 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12228 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12229 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12230 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12231 ; AVX2-NEXT: vmovaps 1504(%rdi), %ymm2
12232 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12233 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12234 ; AVX2-NEXT: vmovaps %ymm2, %ymm8
12235 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12236 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12237 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12238 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12239 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
12240 ; AVX2-NEXT: vmovaps 1696(%rdi), %ymm2
12241 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12242 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12243 ; AVX2-NEXT: vmovaps %ymm2, %ymm9
12244 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12245 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12246 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12247 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12248 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12249 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12250 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12251 ; AVX2-NEXT: vmovaps 1888(%rdi), %ymm2
12252 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12253 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12254 ; AVX2-NEXT: vmovaps %ymm2, %ymm10
12255 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12256 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12257 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12258 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12259 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12260 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12261 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12262 ; AVX2-NEXT: vmovaps 2080(%rdi), %ymm2
12263 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12264 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12265 ; AVX2-NEXT: vmovaps %ymm2, %ymm11
12266 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12267 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12268 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12269 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12270 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12271 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12272 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12273 ; AVX2-NEXT: vmovaps 2272(%rdi), %ymm2
12274 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12275 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12276 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12277 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12278 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12279 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12280 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12281 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12282 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12283 ; AVX2-NEXT: vmovaps 2464(%rdi), %ymm2
12284 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12285 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12286 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12287 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12288 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12289 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12290 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12291 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12292 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12293 ; AVX2-NEXT: vmovaps 2656(%rdi), %ymm2
12294 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12295 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12296 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12297 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12298 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12299 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12300 ; AVX2-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
12301 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12302 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12303 ; AVX2-NEXT: vmovaps 2848(%rdi), %ymm2
12304 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12305 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12306 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12307 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12308 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12309 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12310 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12311 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12312 ; AVX2-NEXT: # xmm0 = xmm0[1],mem[1]
12313 ; AVX2-NEXT: vmovaps 3040(%rdi), %ymm2
12314 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12315 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12316 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12317 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
12318 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12319 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12320 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12321 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
12322 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12323 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
12324 ; AVX2-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12325 ; AVX2-NEXT: vmovaps 80(%rdi), %xmm1
12326 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12327 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
12328 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12329 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12330 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12331 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
12332 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12333 ; AVX2-NEXT: vmovaps 272(%rdi), %xmm12
12334 ; AVX2-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
12335 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm1
12336 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12337 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
12338 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12339 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12340 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12341 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
12342 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12343 ; AVX2-NEXT: vmovaps 464(%rdi), %xmm3
12344 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12345 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm1
12346 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12347 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12348 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12349 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12350 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12351 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
12352 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12353 ; AVX2-NEXT: vmovaps 656(%rdi), %xmm3
12354 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12355 ; AVX2-NEXT: vmovaps 608(%rdi), %xmm1
12356 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12357 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12358 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12359 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12360 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12361 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
12362 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12363 ; AVX2-NEXT: vmovaps 848(%rdi), %xmm3
12364 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12365 ; AVX2-NEXT: vmovaps 800(%rdi), %xmm1
12366 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12367 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12368 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12369 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12370 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12371 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
12372 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12373 ; AVX2-NEXT: vmovaps 1040(%rdi), %xmm3
12374 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12375 ; AVX2-NEXT: vmovaps 992(%rdi), %xmm1
12376 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12377 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12378 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12379 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12380 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12381 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
12382 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12383 ; AVX2-NEXT: vmovaps 1232(%rdi), %xmm3
12384 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12385 ; AVX2-NEXT: vmovaps 1184(%rdi), %xmm1
12386 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12387 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12388 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12389 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12390 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12391 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
12392 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12393 ; AVX2-NEXT: vmovaps 1424(%rdi), %xmm3
12394 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12395 ; AVX2-NEXT: vmovaps 1376(%rdi), %xmm1
12396 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12397 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12398 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12399 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12400 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12401 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
12402 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12403 ; AVX2-NEXT: vmovaps 1616(%rdi), %xmm3
12404 ; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12405 ; AVX2-NEXT: vmovaps 1568(%rdi), %xmm1
12406 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12407 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
12408 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12409 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12410 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12411 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
12412 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12413 ; AVX2-NEXT: vmovaps 1808(%rdi), %xmm1
12414 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12415 ; AVX2-NEXT: vmovaps 1760(%rdi), %xmm13
12416 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
12417 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12418 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12419 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12420 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
12421 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12422 ; AVX2-NEXT: vmovaps 2000(%rdi), %xmm1
12423 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12424 ; AVX2-NEXT: vmovaps 1952(%rdi), %xmm11
12425 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
12426 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12427 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12428 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12429 ; AVX2-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12430 ; AVX2-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
12431 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12432 ; AVX2-NEXT: vmovaps 2192(%rdi), %xmm1
12433 ; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12434 ; AVX2-NEXT: vmovaps 2144(%rdi), %xmm9
12435 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
12436 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12437 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12438 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12439 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
12440 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
12441 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12442 ; AVX2-NEXT: vmovaps 2384(%rdi), %xmm12
12443 ; AVX2-NEXT: vmovaps 2336(%rdi), %xmm7
12444 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
12445 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12446 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12447 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12448 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
12449 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
12450 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12451 ; AVX2-NEXT: vmovaps 2576(%rdi), %xmm8
12452 ; AVX2-NEXT: vmovaps 2528(%rdi), %xmm5
12453 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
12454 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12455 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12456 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12457 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
12458 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
12459 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12460 ; AVX2-NEXT: vmovaps 2768(%rdi), %xmm4
12461 ; AVX2-NEXT: vmovaps 2720(%rdi), %xmm3
12462 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
12463 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12464 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12465 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12466 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
12467 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12468 ; AVX2-NEXT: vmovaps 2960(%rdi), %xmm2
12469 ; AVX2-NEXT: vmovaps 2912(%rdi), %xmm1
12470 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
12471 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12472 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12473 ; AVX2-NEXT: vbroadcastsd 136(%rdi), %ymm0
12474 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12475 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12476 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12477 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12478 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12479 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12480 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12481 ; AVX2-NEXT: vbroadcastsd 328(%rdi), %ymm0
12482 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12483 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12484 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12485 ; AVX2-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
12486 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12487 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12488 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12489 ; AVX2-NEXT: vbroadcastsd 520(%rdi), %ymm0
12490 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12491 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12492 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12493 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12494 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12495 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12496 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12497 ; AVX2-NEXT: vbroadcastsd 712(%rdi), %ymm0
12498 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12499 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12500 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12501 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12502 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12503 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12504 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12505 ; AVX2-NEXT: vbroadcastsd 904(%rdi), %ymm0
12506 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12507 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12508 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12509 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12510 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12511 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12512 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12513 ; AVX2-NEXT: vbroadcastsd 1096(%rdi), %ymm0
12514 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12515 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12516 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12517 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12518 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12519 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12520 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12521 ; AVX2-NEXT: vbroadcastsd 1288(%rdi), %ymm0
12522 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12523 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12524 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12525 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12526 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12527 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12528 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12529 ; AVX2-NEXT: vbroadcastsd 1480(%rdi), %ymm0
12530 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12531 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12532 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12533 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12534 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12535 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12536 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12537 ; AVX2-NEXT: vbroadcastsd 1672(%rdi), %ymm0
12538 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12539 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12540 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12541 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
12542 ; AVX2-NEXT: # xmm15 = xmm15[1],mem[1]
12543 ; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12544 ; AVX2-NEXT: vbroadcastsd 1864(%rdi), %ymm0
12545 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12546 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12547 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
12548 ; AVX2-NEXT: # xmm13 = xmm13[1],mem[1]
12549 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
12550 ; AVX2-NEXT: vbroadcastsd 2056(%rdi), %ymm0
12551 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12552 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12553 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
12554 ; AVX2-NEXT: # xmm11 = xmm11[1],mem[1]
12555 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
12556 ; AVX2-NEXT: vbroadcastsd 2248(%rdi), %ymm0
12557 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12558 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12559 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
12560 ; AVX2-NEXT: # xmm9 = xmm9[1],mem[1]
12561 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
12562 ; AVX2-NEXT: vbroadcastsd 2440(%rdi), %ymm0
12563 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
12564 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
12565 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
12566 ; AVX2-NEXT: vbroadcastsd 2632(%rdi), %ymm0
12567 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
12568 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
12569 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
12570 ; AVX2-NEXT: vbroadcastsd 2824(%rdi), %ymm0
12571 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
12572 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
12573 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
12574 ; AVX2-NEXT: vbroadcastsd 3016(%rdi), %ymm0
12575 ; AVX2-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12576 ; AVX2-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12577 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
12578 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12579 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12580 ; AVX2-NEXT: vmovaps %ymm1, 448(%rsi)
12581 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12582 ; AVX2-NEXT: vmovaps %ymm1, 384(%rsi)
12583 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12584 ; AVX2-NEXT: vmovaps %ymm1, 320(%rsi)
12585 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12586 ; AVX2-NEXT: vmovaps %ymm1, 256(%rsi)
12587 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12588 ; AVX2-NEXT: vmovaps %ymm1, 192(%rsi)
12589 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12590 ; AVX2-NEXT: vmovaps %ymm1, 128(%rsi)
12591 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12592 ; AVX2-NEXT: vmovaps %ymm1, 64(%rsi)
12593 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12594 ; AVX2-NEXT: vmovaps %ymm1, (%rsi)
12595 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12596 ; AVX2-NEXT: vmovaps %ymm1, 480(%rsi)
12597 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12598 ; AVX2-NEXT: vmovaps %ymm1, 416(%rsi)
12599 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12600 ; AVX2-NEXT: vmovaps %ymm1, 352(%rsi)
12601 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12602 ; AVX2-NEXT: vmovaps %ymm1, 288(%rsi)
12603 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12604 ; AVX2-NEXT: vmovaps %ymm1, 224(%rsi)
12605 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12606 ; AVX2-NEXT: vmovaps %ymm1, 160(%rsi)
12607 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12608 ; AVX2-NEXT: vmovaps %ymm1, 96(%rsi)
12609 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12610 ; AVX2-NEXT: vmovaps %ymm1, 32(%rsi)
12611 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12612 ; AVX2-NEXT: vmovaps %ymm1, 448(%rdx)
12613 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12614 ; AVX2-NEXT: vmovaps %ymm1, 384(%rdx)
12615 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12616 ; AVX2-NEXT: vmovaps %ymm1, 320(%rdx)
12617 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12618 ; AVX2-NEXT: vmovaps %ymm1, 256(%rdx)
12619 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12620 ; AVX2-NEXT: vmovaps %ymm1, 192(%rdx)
12621 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12622 ; AVX2-NEXT: vmovaps %ymm1, 128(%rdx)
12623 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12624 ; AVX2-NEXT: vmovaps %ymm1, 64(%rdx)
12625 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12626 ; AVX2-NEXT: vmovaps %ymm1, (%rdx)
12627 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12628 ; AVX2-NEXT: vmovaps %ymm1, 480(%rdx)
12629 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12630 ; AVX2-NEXT: vmovaps %ymm1, 416(%rdx)
12631 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12632 ; AVX2-NEXT: vmovaps %ymm1, 352(%rdx)
12633 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12634 ; AVX2-NEXT: vmovaps %ymm1, 288(%rdx)
12635 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12636 ; AVX2-NEXT: vmovaps %ymm1, 224(%rdx)
12637 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12638 ; AVX2-NEXT: vmovaps %ymm1, 160(%rdx)
12639 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12640 ; AVX2-NEXT: vmovaps %ymm1, 96(%rdx)
12641 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12642 ; AVX2-NEXT: vmovaps %ymm1, 32(%rdx)
12643 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12644 ; AVX2-NEXT: vmovaps %ymm1, (%rcx)
12645 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12646 ; AVX2-NEXT: vmovaps %ymm1, 64(%rcx)
12647 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12648 ; AVX2-NEXT: vmovaps %ymm1, 128(%rcx)
12649 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12650 ; AVX2-NEXT: vmovaps %ymm1, 192(%rcx)
12651 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12652 ; AVX2-NEXT: vmovaps %ymm1, 256(%rcx)
12653 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12654 ; AVX2-NEXT: vmovaps %ymm1, 320(%rcx)
12655 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12656 ; AVX2-NEXT: vmovaps %ymm1, 384(%rcx)
12657 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12658 ; AVX2-NEXT: vmovaps %ymm1, 448(%rcx)
12659 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12660 ; AVX2-NEXT: vmovaps %ymm1, 480(%rcx)
12661 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12662 ; AVX2-NEXT: vmovaps %ymm1, 416(%rcx)
12663 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12664 ; AVX2-NEXT: vmovaps %ymm1, 352(%rcx)
12665 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12666 ; AVX2-NEXT: vmovaps %ymm1, 288(%rcx)
12667 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12668 ; AVX2-NEXT: vmovaps %ymm1, 224(%rcx)
12669 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12670 ; AVX2-NEXT: vmovaps %ymm1, 160(%rcx)
12671 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12672 ; AVX2-NEXT: vmovaps %ymm1, 96(%rcx)
12673 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12674 ; AVX2-NEXT: vmovaps %ymm1, 32(%rcx)
12675 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12676 ; AVX2-NEXT: vmovaps %ymm1, 480(%r8)
12677 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12678 ; AVX2-NEXT: vmovaps %ymm1, 448(%r8)
12679 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12680 ; AVX2-NEXT: vmovaps %ymm1, 416(%r8)
12681 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12682 ; AVX2-NEXT: vmovaps %ymm1, 384(%r8)
12683 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12684 ; AVX2-NEXT: vmovaps %ymm1, 352(%r8)
12685 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12686 ; AVX2-NEXT: vmovaps %ymm1, 320(%r8)
12687 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12688 ; AVX2-NEXT: vmovaps %ymm1, 288(%r8)
12689 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12690 ; AVX2-NEXT: vmovaps %ymm1, 256(%r8)
12691 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12692 ; AVX2-NEXT: vmovaps %ymm1, 224(%r8)
12693 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12694 ; AVX2-NEXT: vmovaps %ymm1, 192(%r8)
12695 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12696 ; AVX2-NEXT: vmovaps %ymm1, 160(%r8)
12697 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12698 ; AVX2-NEXT: vmovaps %ymm1, 128(%r8)
12699 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12700 ; AVX2-NEXT: vmovaps %ymm1, 96(%r8)
12701 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12702 ; AVX2-NEXT: vmovaps %ymm1, 64(%r8)
12703 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12704 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
12705 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12706 ; AVX2-NEXT: vmovaps %ymm1, (%r8)
12707 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12708 ; AVX2-NEXT: vmovaps %ymm1, 480(%r9)
12709 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12710 ; AVX2-NEXT: vmovaps %ymm1, 448(%r9)
12711 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12712 ; AVX2-NEXT: vmovaps %ymm1, 416(%r9)
12713 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12714 ; AVX2-NEXT: vmovaps %ymm1, 384(%r9)
12715 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12716 ; AVX2-NEXT: vmovaps %ymm1, 352(%r9)
12717 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12718 ; AVX2-NEXT: vmovaps %ymm1, 320(%r9)
12719 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12720 ; AVX2-NEXT: vmovaps %ymm1, 288(%r9)
12721 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12722 ; AVX2-NEXT: vmovaps %ymm1, 256(%r9)
12723 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12724 ; AVX2-NEXT: vmovaps %ymm1, 224(%r9)
12725 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12726 ; AVX2-NEXT: vmovaps %ymm1, 192(%r9)
12727 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12728 ; AVX2-NEXT: vmovaps %ymm1, 160(%r9)
12729 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12730 ; AVX2-NEXT: vmovaps %ymm1, 128(%r9)
12731 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12732 ; AVX2-NEXT: vmovaps %ymm1, 96(%r9)
12733 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12734 ; AVX2-NEXT: vmovaps %ymm1, 64(%r9)
12735 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12736 ; AVX2-NEXT: vmovaps %ymm1, 32(%r9)
12737 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12738 ; AVX2-NEXT: vmovaps %ymm1, (%r9)
12739 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
12740 ; AVX2-NEXT: vmovaps %ymm0, 480(%rax)
12741 ; AVX2-NEXT: vmovaps %ymm3, 448(%rax)
12742 ; AVX2-NEXT: vmovaps %ymm5, 416(%rax)
12743 ; AVX2-NEXT: vmovaps %ymm7, 384(%rax)
12744 ; AVX2-NEXT: vmovaps %ymm9, 352(%rax)
12745 ; AVX2-NEXT: vmovaps %ymm11, 320(%rax)
12746 ; AVX2-NEXT: vmovaps %ymm13, 288(%rax)
12747 ; AVX2-NEXT: vmovaps %ymm15, 256(%rax)
12748 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12749 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
12750 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12751 ; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
12752 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12753 ; AVX2-NEXT: vmovaps %ymm0, 160(%rax)
12754 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12755 ; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
12756 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12757 ; AVX2-NEXT: vmovaps %ymm0, 96(%rax)
12758 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12759 ; AVX2-NEXT: vmovaps %ymm0, 64(%rax)
12760 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12761 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
12762 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12763 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
12764 ; AVX2-NEXT: addq $3432, %rsp # imm = 0xD68
12765 ; AVX2-NEXT: vzeroupper
12768 ; AVX2-FP-LABEL: load_i64_stride6_vf64:
12769 ; AVX2-FP: # %bb.0:
12770 ; AVX2-FP-NEXT: subq $3432, %rsp # imm = 0xD68
12771 ; AVX2-FP-NEXT: vmovaps 1088(%rdi), %ymm2
12772 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12773 ; AVX2-FP-NEXT: vmovaps 1056(%rdi), %ymm4
12774 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12775 ; AVX2-FP-NEXT: vmovaps 704(%rdi), %ymm3
12776 ; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12777 ; AVX2-FP-NEXT: vmovaps 672(%rdi), %ymm5
12778 ; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12779 ; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm1
12780 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12781 ; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm6
12782 ; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12783 ; AVX2-FP-NEXT: vmovaps 240(%rdi), %xmm7
12784 ; AVX2-FP-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12785 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm0
12786 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12787 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
12788 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
12789 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12790 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12791 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12792 ; AVX2-FP-NEXT: vmovaps 624(%rdi), %xmm1
12793 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12794 ; AVX2-FP-NEXT: vmovaps 576(%rdi), %xmm0
12795 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12796 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
12797 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
12798 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12799 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12800 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12801 ; AVX2-FP-NEXT: vmovaps 1008(%rdi), %xmm1
12802 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12803 ; AVX2-FP-NEXT: vmovaps 960(%rdi), %xmm0
12804 ; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12805 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
12806 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
12807 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
12808 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12809 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12810 ; AVX2-FP-NEXT: vmovaps 1472(%rdi), %ymm0
12811 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12812 ; AVX2-FP-NEXT: vmovaps 1440(%rdi), %ymm1
12813 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12814 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12815 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12816 ; AVX2-FP-NEXT: vmovaps 1392(%rdi), %xmm2
12817 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12818 ; AVX2-FP-NEXT: vmovaps 1344(%rdi), %xmm1
12819 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12820 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12821 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12822 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12823 ; AVX2-FP-NEXT: vmovaps 1856(%rdi), %ymm0
12824 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12825 ; AVX2-FP-NEXT: vmovaps 1824(%rdi), %ymm1
12826 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12827 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12828 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12829 ; AVX2-FP-NEXT: vmovaps 1776(%rdi), %xmm2
12830 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12831 ; AVX2-FP-NEXT: vmovaps 1728(%rdi), %xmm1
12832 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12833 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12834 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12835 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12836 ; AVX2-FP-NEXT: vmovaps 2240(%rdi), %ymm0
12837 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12838 ; AVX2-FP-NEXT: vmovaps 2208(%rdi), %ymm1
12839 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12840 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12841 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12842 ; AVX2-FP-NEXT: vmovaps 2160(%rdi), %xmm2
12843 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12844 ; AVX2-FP-NEXT: vmovaps 2112(%rdi), %xmm1
12845 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12846 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12847 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12848 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12849 ; AVX2-FP-NEXT: vmovaps 2624(%rdi), %ymm0
12850 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12851 ; AVX2-FP-NEXT: vmovaps 2592(%rdi), %ymm1
12852 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12853 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12854 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12855 ; AVX2-FP-NEXT: vmovaps 2544(%rdi), %xmm2
12856 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12857 ; AVX2-FP-NEXT: vmovaps 2496(%rdi), %xmm1
12858 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12859 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12860 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12861 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12862 ; AVX2-FP-NEXT: vmovaps 3008(%rdi), %ymm0
12863 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12864 ; AVX2-FP-NEXT: vmovaps 2976(%rdi), %ymm1
12865 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12866 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12867 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12868 ; AVX2-FP-NEXT: vmovaps 2928(%rdi), %xmm2
12869 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12870 ; AVX2-FP-NEXT: vmovaps 2880(%rdi), %xmm1
12871 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12872 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
12873 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12874 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12875 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm0
12876 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12877 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm1
12878 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12879 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12880 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12881 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm2
12882 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12883 ; AVX2-FP-NEXT: vmovaps 48(%rdi), %xmm1
12884 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12885 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
12886 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12887 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12888 ; AVX2-FP-NEXT: vmovaps 512(%rdi), %ymm0
12889 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12890 ; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm1
12891 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12892 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12893 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12894 ; AVX2-FP-NEXT: vmovaps 432(%rdi), %xmm1
12895 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12896 ; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm13
12897 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
12898 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12899 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12900 ; AVX2-FP-NEXT: vmovaps 896(%rdi), %ymm0
12901 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12902 ; AVX2-FP-NEXT: vmovaps 864(%rdi), %ymm1
12903 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12904 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12905 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12906 ; AVX2-FP-NEXT: vmovaps 816(%rdi), %xmm1
12907 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12908 ; AVX2-FP-NEXT: vmovaps 768(%rdi), %xmm11
12909 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
12910 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12911 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12912 ; AVX2-FP-NEXT: vmovaps 1280(%rdi), %ymm0
12913 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12914 ; AVX2-FP-NEXT: vmovaps 1248(%rdi), %ymm1
12915 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12916 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12917 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12918 ; AVX2-FP-NEXT: vmovaps 1200(%rdi), %xmm1
12919 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12920 ; AVX2-FP-NEXT: vmovaps 1152(%rdi), %xmm9
12921 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
12922 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12923 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12924 ; AVX2-FP-NEXT: vmovaps 1664(%rdi), %ymm0
12925 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12926 ; AVX2-FP-NEXT: vmovaps 1632(%rdi), %ymm1
12927 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12928 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12929 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12930 ; AVX2-FP-NEXT: vmovaps 1584(%rdi), %xmm14
12931 ; AVX2-FP-NEXT: vmovaps 1536(%rdi), %xmm7
12932 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
12933 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12934 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12935 ; AVX2-FP-NEXT: vmovaps 2048(%rdi), %ymm12
12936 ; AVX2-FP-NEXT: vmovaps 2016(%rdi), %ymm0
12937 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12938 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
12939 ; AVX2-FP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12940 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12941 ; AVX2-FP-NEXT: vmovaps 1968(%rdi), %xmm10
12942 ; AVX2-FP-NEXT: vmovaps 1920(%rdi), %xmm5
12943 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
12944 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12945 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12946 ; AVX2-FP-NEXT: vmovaps 2432(%rdi), %ymm8
12947 ; AVX2-FP-NEXT: vmovaps 2400(%rdi), %ymm0
12948 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12949 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
12950 ; AVX2-FP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12951 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12952 ; AVX2-FP-NEXT: vmovaps 2352(%rdi), %xmm6
12953 ; AVX2-FP-NEXT: vmovaps 2304(%rdi), %xmm3
12954 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
12955 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12956 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12957 ; AVX2-FP-NEXT: vmovaps 2816(%rdi), %ymm4
12958 ; AVX2-FP-NEXT: vmovaps 2784(%rdi), %ymm0
12959 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12960 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
12961 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12962 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
12963 ; AVX2-FP-NEXT: vmovaps 2736(%rdi), %xmm2
12964 ; AVX2-FP-NEXT: vmovaps 2688(%rdi), %xmm1
12965 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
12966 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
12967 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12968 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12969 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12970 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12971 ; AVX2-FP-NEXT: vbroadcastsd 296(%rdi), %ymm15
12972 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12973 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12974 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12975 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12976 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12977 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12978 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12979 ; AVX2-FP-NEXT: vbroadcastsd 680(%rdi), %ymm15
12980 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12981 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12982 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12983 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12984 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12985 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12986 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12987 ; AVX2-FP-NEXT: vbroadcastsd 1064(%rdi), %ymm15
12988 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12989 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12990 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12991 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12992 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12993 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12994 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
12995 ; AVX2-FP-NEXT: vbroadcastsd 1448(%rdi), %ymm15
12996 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
12997 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
12998 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
12999 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13000 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13001 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13002 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13003 ; AVX2-FP-NEXT: vbroadcastsd 1832(%rdi), %ymm15
13004 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13005 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13006 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13007 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13008 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13009 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13010 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13011 ; AVX2-FP-NEXT: vbroadcastsd 2216(%rdi), %ymm15
13012 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13013 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13014 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13015 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13016 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13017 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13018 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13019 ; AVX2-FP-NEXT: vbroadcastsd 2600(%rdi), %ymm15
13020 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13021 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13022 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13023 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13024 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13025 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13026 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13027 ; AVX2-FP-NEXT: vbroadcastsd 2984(%rdi), %ymm15
13028 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13029 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13030 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13031 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13032 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13033 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13034 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13035 ; AVX2-FP-NEXT: vbroadcastsd 104(%rdi), %ymm15
13036 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
13037 ; AVX2-FP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
13038 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
13039 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13040 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
13041 ; AVX2-FP-NEXT: # xmm0 = xmm13[1],mem[1]
13042 ; AVX2-FP-NEXT: vbroadcastsd 488(%rdi), %ymm13
13043 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
13044 ; AVX2-FP-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
13045 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
13046 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13047 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
13048 ; AVX2-FP-NEXT: # xmm0 = xmm11[1],mem[1]
13049 ; AVX2-FP-NEXT: vbroadcastsd 872(%rdi), %ymm11
13050 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
13051 ; AVX2-FP-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
13052 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
13053 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13054 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
13055 ; AVX2-FP-NEXT: # xmm0 = xmm9[1],mem[1]
13056 ; AVX2-FP-NEXT: vbroadcastsd 1256(%rdi), %ymm9
13057 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
13058 ; AVX2-FP-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
13059 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
13060 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13061 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
13062 ; AVX2-FP-NEXT: vbroadcastsd 1640(%rdi), %ymm7
13063 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
13064 ; AVX2-FP-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
13065 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
13066 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13067 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
13068 ; AVX2-FP-NEXT: vbroadcastsd 2024(%rdi), %ymm5
13069 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
13070 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
13071 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13072 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
13073 ; AVX2-FP-NEXT: vbroadcastsd 2408(%rdi), %ymm3
13074 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
13075 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
13076 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13077 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
13078 ; AVX2-FP-NEXT: vbroadcastsd 2792(%rdi), %ymm1
13079 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
13080 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13081 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13082 ; AVX2-FP-NEXT: vbroadcastsd 352(%rdi), %ymm0
13083 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13084 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13085 ; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm2
13086 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13087 ; AVX2-FP-NEXT: vmovaps 208(%rdi), %xmm1
13088 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13089 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13090 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13091 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13092 ; AVX2-FP-NEXT: vbroadcastsd 736(%rdi), %ymm0
13093 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13094 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13095 ; AVX2-FP-NEXT: vmovaps 640(%rdi), %xmm2
13096 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13097 ; AVX2-FP-NEXT: vmovaps 592(%rdi), %xmm1
13098 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13099 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13100 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13101 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13102 ; AVX2-FP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
13103 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13104 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13105 ; AVX2-FP-NEXT: vmovaps 1024(%rdi), %xmm2
13106 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13107 ; AVX2-FP-NEXT: vmovaps 976(%rdi), %xmm1
13108 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13109 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13110 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13111 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13112 ; AVX2-FP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
13113 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13114 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13115 ; AVX2-FP-NEXT: vmovaps 1408(%rdi), %xmm2
13116 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13117 ; AVX2-FP-NEXT: vmovaps 1360(%rdi), %xmm1
13118 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13119 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13120 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13121 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13122 ; AVX2-FP-NEXT: vbroadcastsd 1888(%rdi), %ymm0
13123 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13124 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13125 ; AVX2-FP-NEXT: vmovaps 1792(%rdi), %xmm2
13126 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13127 ; AVX2-FP-NEXT: vmovaps 1744(%rdi), %xmm1
13128 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13129 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13130 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13131 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13132 ; AVX2-FP-NEXT: vbroadcastsd 2272(%rdi), %ymm0
13133 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13134 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13135 ; AVX2-FP-NEXT: vmovaps 2176(%rdi), %xmm2
13136 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13137 ; AVX2-FP-NEXT: vmovaps 2128(%rdi), %xmm1
13138 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13139 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13140 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13141 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13142 ; AVX2-FP-NEXT: vbroadcastsd 2656(%rdi), %ymm0
13143 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13144 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13145 ; AVX2-FP-NEXT: vmovaps 2560(%rdi), %xmm2
13146 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13147 ; AVX2-FP-NEXT: vmovaps 2512(%rdi), %xmm1
13148 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13149 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13150 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13151 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13152 ; AVX2-FP-NEXT: vbroadcastsd 3040(%rdi), %ymm0
13153 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13154 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13155 ; AVX2-FP-NEXT: vmovaps 2944(%rdi), %xmm2
13156 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13157 ; AVX2-FP-NEXT: vmovaps 2896(%rdi), %xmm1
13158 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13159 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13160 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13161 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13162 ; AVX2-FP-NEXT: vbroadcastsd 2848(%rdi), %ymm0
13163 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13164 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13165 ; AVX2-FP-NEXT: vmovaps 2752(%rdi), %xmm2
13166 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13167 ; AVX2-FP-NEXT: vmovaps 2704(%rdi), %xmm1
13168 ; AVX2-FP-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
13169 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13170 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13171 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13172 ; AVX2-FP-NEXT: vbroadcastsd 2464(%rdi), %ymm0
13173 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13174 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13175 ; AVX2-FP-NEXT: vmovaps 2368(%rdi), %xmm2
13176 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13177 ; AVX2-FP-NEXT: vmovaps 2320(%rdi), %xmm1
13178 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13179 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13180 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13181 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13182 ; AVX2-FP-NEXT: vbroadcastsd 2080(%rdi), %ymm0
13183 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13184 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13185 ; AVX2-FP-NEXT: vmovaps 1984(%rdi), %xmm2
13186 ; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13187 ; AVX2-FP-NEXT: vmovaps 1936(%rdi), %xmm1
13188 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13189 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13190 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13191 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13192 ; AVX2-FP-NEXT: vbroadcastsd 1696(%rdi), %ymm0
13193 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13194 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13195 ; AVX2-FP-NEXT: vmovaps 1600(%rdi), %xmm12
13196 ; AVX2-FP-NEXT: vmovaps 1552(%rdi), %xmm11
13197 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
13198 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13199 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13200 ; AVX2-FP-NEXT: vbroadcastsd 1312(%rdi), %ymm0
13201 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
13202 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
13203 ; AVX2-FP-NEXT: vmovaps 1216(%rdi), %xmm9
13204 ; AVX2-FP-NEXT: vmovaps 1168(%rdi), %xmm8
13205 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
13206 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13207 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13208 ; AVX2-FP-NEXT: vbroadcastsd 928(%rdi), %ymm0
13209 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
13210 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
13211 ; AVX2-FP-NEXT: vmovaps 832(%rdi), %xmm6
13212 ; AVX2-FP-NEXT: vmovaps 784(%rdi), %xmm5
13213 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
13214 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
13215 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13216 ; AVX2-FP-NEXT: vbroadcastsd 544(%rdi), %ymm0
13217 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
13218 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
13219 ; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm13
13220 ; AVX2-FP-NEXT: vmovaps 400(%rdi), %xmm3
13221 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
13222 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13223 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13224 ; AVX2-FP-NEXT: vbroadcastsd 160(%rdi), %ymm0
13225 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
13226 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
13227 ; AVX2-FP-NEXT: vmovaps 16(%rdi), %xmm15
13228 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm0
13229 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
13230 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
13231 ; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13232 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
13233 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm14
13234 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
13235 ; AVX2-FP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13236 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13237 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13238 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13239 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13240 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13241 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13242 ; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm2
13243 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13244 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13245 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm15
13246 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13247 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13248 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13249 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13250 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
13251 ; AVX2-FP-NEXT: vmovaps 544(%rdi), %ymm2
13252 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm2[1],ymm4[3],ymm2[3]
13253 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm3
13254 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13255 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13256 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13257 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13258 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13259 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13260 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13261 ; AVX2-FP-NEXT: vmovaps 736(%rdi), %ymm2
13262 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13263 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13264 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm4
13265 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13266 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13267 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13268 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13269 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
13270 ; AVX2-FP-NEXT: vmovaps 928(%rdi), %ymm2
13271 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm2[1],ymm7[3],ymm2[3]
13272 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm5
13273 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13274 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13275 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13276 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13277 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13278 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13279 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13280 ; AVX2-FP-NEXT: vmovaps 1120(%rdi), %ymm2
13281 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13282 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13283 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm6
13284 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13285 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13286 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13287 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13288 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
13289 ; AVX2-FP-NEXT: vmovaps 1312(%rdi), %ymm2
13290 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm2[1],ymm10[3],ymm2[3]
13291 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm7
13292 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13293 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13294 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13295 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13296 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13297 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13298 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13299 ; AVX2-FP-NEXT: vmovaps 1504(%rdi), %ymm2
13300 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13301 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13302 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm8
13303 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13304 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13305 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13306 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13307 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
13308 ; AVX2-FP-NEXT: vmovaps 1696(%rdi), %ymm2
13309 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13310 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13311 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm9
13312 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13313 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13314 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13315 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13316 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13317 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13318 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13319 ; AVX2-FP-NEXT: vmovaps 1888(%rdi), %ymm2
13320 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13321 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13322 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm10
13323 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13324 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13325 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13326 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13327 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13328 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13329 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13330 ; AVX2-FP-NEXT: vmovaps 2080(%rdi), %ymm2
13331 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13332 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13333 ; AVX2-FP-NEXT: vmovaps %ymm2, %ymm11
13334 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13335 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13336 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13337 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13338 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13339 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13340 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13341 ; AVX2-FP-NEXT: vmovaps 2272(%rdi), %ymm2
13342 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13343 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13344 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13345 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13346 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13347 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13348 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13349 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13350 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13351 ; AVX2-FP-NEXT: vmovaps 2464(%rdi), %ymm2
13352 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13353 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13354 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13355 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13356 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13357 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13358 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13359 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13360 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13361 ; AVX2-FP-NEXT: vmovaps 2656(%rdi), %ymm2
13362 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13363 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13364 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13365 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13366 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13367 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13368 ; AVX2-FP-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
13369 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13370 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13371 ; AVX2-FP-NEXT: vmovaps 2848(%rdi), %ymm2
13372 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13373 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13374 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13375 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13376 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13377 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13378 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
13379 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13380 ; AVX2-FP-NEXT: # xmm0 = xmm0[1],mem[1]
13381 ; AVX2-FP-NEXT: vmovaps 3040(%rdi), %ymm2
13382 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13383 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13384 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13385 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
13386 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13387 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13388 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13389 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
13390 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13391 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
13392 ; AVX2-FP-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13393 ; AVX2-FP-NEXT: vmovaps 80(%rdi), %xmm1
13394 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13395 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
13396 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13397 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13398 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13399 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
13400 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13401 ; AVX2-FP-NEXT: vmovaps 272(%rdi), %xmm12
13402 ; AVX2-FP-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
13403 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm1
13404 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13405 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
13406 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13407 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13408 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13409 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
13410 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13411 ; AVX2-FP-NEXT: vmovaps 464(%rdi), %xmm3
13412 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13413 ; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm1
13414 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13415 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13416 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13417 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13418 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13419 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
13420 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13421 ; AVX2-FP-NEXT: vmovaps 656(%rdi), %xmm3
13422 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13423 ; AVX2-FP-NEXT: vmovaps 608(%rdi), %xmm1
13424 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13425 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13426 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13427 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13428 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13429 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
13430 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13431 ; AVX2-FP-NEXT: vmovaps 848(%rdi), %xmm3
13432 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13433 ; AVX2-FP-NEXT: vmovaps 800(%rdi), %xmm1
13434 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13435 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13436 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13437 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13438 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13439 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
13440 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13441 ; AVX2-FP-NEXT: vmovaps 1040(%rdi), %xmm3
13442 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13443 ; AVX2-FP-NEXT: vmovaps 992(%rdi), %xmm1
13444 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13445 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13446 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13447 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13448 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13449 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
13450 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13451 ; AVX2-FP-NEXT: vmovaps 1232(%rdi), %xmm3
13452 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13453 ; AVX2-FP-NEXT: vmovaps 1184(%rdi), %xmm1
13454 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13455 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13456 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13457 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13458 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13459 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
13460 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13461 ; AVX2-FP-NEXT: vmovaps 1424(%rdi), %xmm3
13462 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13463 ; AVX2-FP-NEXT: vmovaps 1376(%rdi), %xmm1
13464 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13465 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13466 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13467 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13468 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13469 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
13470 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13471 ; AVX2-FP-NEXT: vmovaps 1616(%rdi), %xmm3
13472 ; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13473 ; AVX2-FP-NEXT: vmovaps 1568(%rdi), %xmm1
13474 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13475 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
13476 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13477 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13478 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13479 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
13480 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13481 ; AVX2-FP-NEXT: vmovaps 1808(%rdi), %xmm1
13482 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13483 ; AVX2-FP-NEXT: vmovaps 1760(%rdi), %xmm13
13484 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
13485 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13486 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13487 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13488 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
13489 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13490 ; AVX2-FP-NEXT: vmovaps 2000(%rdi), %xmm1
13491 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13492 ; AVX2-FP-NEXT: vmovaps 1952(%rdi), %xmm11
13493 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
13494 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13495 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13496 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13497 ; AVX2-FP-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13498 ; AVX2-FP-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
13499 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13500 ; AVX2-FP-NEXT: vmovaps 2192(%rdi), %xmm1
13501 ; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13502 ; AVX2-FP-NEXT: vmovaps 2144(%rdi), %xmm9
13503 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
13504 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13505 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13506 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13507 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
13508 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
13509 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13510 ; AVX2-FP-NEXT: vmovaps 2384(%rdi), %xmm12
13511 ; AVX2-FP-NEXT: vmovaps 2336(%rdi), %xmm7
13512 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
13513 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13514 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13515 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13516 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
13517 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
13518 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13519 ; AVX2-FP-NEXT: vmovaps 2576(%rdi), %xmm8
13520 ; AVX2-FP-NEXT: vmovaps 2528(%rdi), %xmm5
13521 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
13522 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13523 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13524 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13525 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
13526 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
13527 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13528 ; AVX2-FP-NEXT: vmovaps 2768(%rdi), %xmm4
13529 ; AVX2-FP-NEXT: vmovaps 2720(%rdi), %xmm3
13530 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
13531 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13532 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13533 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13534 ; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
13535 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13536 ; AVX2-FP-NEXT: vmovaps 2960(%rdi), %xmm2
13537 ; AVX2-FP-NEXT: vmovaps 2912(%rdi), %xmm1
13538 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
13539 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13540 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13541 ; AVX2-FP-NEXT: vbroadcastsd 136(%rdi), %ymm0
13542 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13543 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13544 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13545 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13546 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13547 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13548 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13549 ; AVX2-FP-NEXT: vbroadcastsd 328(%rdi), %ymm0
13550 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13551 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13552 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13553 ; AVX2-FP-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
13554 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13555 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13556 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13557 ; AVX2-FP-NEXT: vbroadcastsd 520(%rdi), %ymm0
13558 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13559 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13560 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13561 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13562 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13563 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13564 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13565 ; AVX2-FP-NEXT: vbroadcastsd 712(%rdi), %ymm0
13566 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13567 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13568 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13569 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13570 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13571 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13572 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13573 ; AVX2-FP-NEXT: vbroadcastsd 904(%rdi), %ymm0
13574 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13575 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13576 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13577 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13578 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13579 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13580 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13581 ; AVX2-FP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
13582 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13583 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13584 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13585 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13586 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13587 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13588 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13589 ; AVX2-FP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
13590 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13591 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13592 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13593 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13594 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13595 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13596 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13597 ; AVX2-FP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
13598 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13599 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13600 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13601 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13602 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13603 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13604 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13605 ; AVX2-FP-NEXT: vbroadcastsd 1672(%rdi), %ymm0
13606 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13607 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13608 ; AVX2-FP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
13609 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
13610 ; AVX2-FP-NEXT: # xmm15 = xmm15[1],mem[1]
13611 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
13612 ; AVX2-FP-NEXT: vbroadcastsd 1864(%rdi), %ymm0
13613 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13614 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13615 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
13616 ; AVX2-FP-NEXT: # xmm13 = xmm13[1],mem[1]
13617 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
13618 ; AVX2-FP-NEXT: vbroadcastsd 2056(%rdi), %ymm0
13619 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13620 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13621 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
13622 ; AVX2-FP-NEXT: # xmm11 = xmm11[1],mem[1]
13623 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
13624 ; AVX2-FP-NEXT: vbroadcastsd 2248(%rdi), %ymm0
13625 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13626 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13627 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
13628 ; AVX2-FP-NEXT: # xmm9 = xmm9[1],mem[1]
13629 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
13630 ; AVX2-FP-NEXT: vbroadcastsd 2440(%rdi), %ymm0
13631 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
13632 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
13633 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
13634 ; AVX2-FP-NEXT: vbroadcastsd 2632(%rdi), %ymm0
13635 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
13636 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
13637 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
13638 ; AVX2-FP-NEXT: vbroadcastsd 2824(%rdi), %ymm0
13639 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
13640 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
13641 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
13642 ; AVX2-FP-NEXT: vbroadcastsd 3016(%rdi), %ymm0
13643 ; AVX2-FP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13644 ; AVX2-FP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
13645 ; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
13646 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13647 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13648 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rsi)
13649 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13650 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rsi)
13651 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13652 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rsi)
13653 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13654 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rsi)
13655 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13656 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rsi)
13657 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13658 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rsi)
13659 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13660 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rsi)
13661 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13662 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rsi)
13663 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13664 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rsi)
13665 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13666 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rsi)
13667 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13668 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rsi)
13669 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13670 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rsi)
13671 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13672 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rsi)
13673 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13674 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rsi)
13675 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13676 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rsi)
13677 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13678 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rsi)
13679 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13680 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rdx)
13681 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13682 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rdx)
13683 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13684 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rdx)
13685 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13686 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rdx)
13687 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13688 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rdx)
13689 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13690 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rdx)
13691 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13692 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rdx)
13693 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13694 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rdx)
13695 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13696 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rdx)
13697 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13698 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rdx)
13699 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13700 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rdx)
13701 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13702 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rdx)
13703 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13704 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rdx)
13705 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13706 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rdx)
13707 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13708 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rdx)
13709 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13710 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rdx)
13711 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13712 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rcx)
13713 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13714 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rcx)
13715 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13716 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rcx)
13717 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13718 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%rcx)
13719 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13720 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%rcx)
13721 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13722 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%rcx)
13723 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13724 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%rcx)
13725 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13726 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%rcx)
13727 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13728 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%rcx)
13729 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13730 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%rcx)
13731 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13732 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%rcx)
13733 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13734 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%rcx)
13735 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13736 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%rcx)
13737 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13738 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%rcx)
13739 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13740 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rcx)
13741 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13742 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rcx)
13743 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13744 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%r8)
13745 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13746 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%r8)
13747 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13748 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%r8)
13749 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13750 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%r8)
13751 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13752 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%r8)
13753 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13754 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%r8)
13755 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13756 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%r8)
13757 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13758 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%r8)
13759 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13760 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r8)
13761 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13762 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r8)
13763 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13764 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r8)
13765 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13766 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r8)
13767 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13768 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r8)
13769 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13770 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r8)
13771 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13772 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r8)
13773 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13774 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r8)
13775 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13776 ; AVX2-FP-NEXT: vmovaps %ymm1, 480(%r9)
13777 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13778 ; AVX2-FP-NEXT: vmovaps %ymm1, 448(%r9)
13779 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13780 ; AVX2-FP-NEXT: vmovaps %ymm1, 416(%r9)
13781 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13782 ; AVX2-FP-NEXT: vmovaps %ymm1, 384(%r9)
13783 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13784 ; AVX2-FP-NEXT: vmovaps %ymm1, 352(%r9)
13785 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13786 ; AVX2-FP-NEXT: vmovaps %ymm1, 320(%r9)
13787 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13788 ; AVX2-FP-NEXT: vmovaps %ymm1, 288(%r9)
13789 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13790 ; AVX2-FP-NEXT: vmovaps %ymm1, 256(%r9)
13791 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13792 ; AVX2-FP-NEXT: vmovaps %ymm1, 224(%r9)
13793 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13794 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r9)
13795 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13796 ; AVX2-FP-NEXT: vmovaps %ymm1, 160(%r9)
13797 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13798 ; AVX2-FP-NEXT: vmovaps %ymm1, 128(%r9)
13799 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13800 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r9)
13801 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13802 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r9)
13803 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13804 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r9)
13805 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13806 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r9)
13807 ; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
13808 ; AVX2-FP-NEXT: vmovaps %ymm0, 480(%rax)
13809 ; AVX2-FP-NEXT: vmovaps %ymm3, 448(%rax)
13810 ; AVX2-FP-NEXT: vmovaps %ymm5, 416(%rax)
13811 ; AVX2-FP-NEXT: vmovaps %ymm7, 384(%rax)
13812 ; AVX2-FP-NEXT: vmovaps %ymm9, 352(%rax)
13813 ; AVX2-FP-NEXT: vmovaps %ymm11, 320(%rax)
13814 ; AVX2-FP-NEXT: vmovaps %ymm13, 288(%rax)
13815 ; AVX2-FP-NEXT: vmovaps %ymm15, 256(%rax)
13816 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13817 ; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
13818 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13819 ; AVX2-FP-NEXT: vmovaps %ymm0, 192(%rax)
13820 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13821 ; AVX2-FP-NEXT: vmovaps %ymm0, 160(%rax)
13822 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13823 ; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
13824 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13825 ; AVX2-FP-NEXT: vmovaps %ymm0, 96(%rax)
13826 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13827 ; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
13828 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13829 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
13830 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13831 ; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
13832 ; AVX2-FP-NEXT: addq $3432, %rsp # imm = 0xD68
13833 ; AVX2-FP-NEXT: vzeroupper
13834 ; AVX2-FP-NEXT: retq
13836 ; AVX2-FCP-LABEL: load_i64_stride6_vf64:
13837 ; AVX2-FCP: # %bb.0:
13838 ; AVX2-FCP-NEXT: subq $3432, %rsp # imm = 0xD68
13839 ; AVX2-FCP-NEXT: vmovaps 1088(%rdi), %ymm2
13840 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13841 ; AVX2-FCP-NEXT: vmovaps 1056(%rdi), %ymm4
13842 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13843 ; AVX2-FCP-NEXT: vmovaps 704(%rdi), %ymm3
13844 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13845 ; AVX2-FCP-NEXT: vmovaps 672(%rdi), %ymm5
13846 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13847 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm1
13848 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13849 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm6
13850 ; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13851 ; AVX2-FCP-NEXT: vmovaps 240(%rdi), %xmm7
13852 ; AVX2-FCP-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13853 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm0
13854 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13855 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
13856 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
13857 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13858 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13859 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13860 ; AVX2-FCP-NEXT: vmovaps 624(%rdi), %xmm1
13861 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13862 ; AVX2-FCP-NEXT: vmovaps 576(%rdi), %xmm0
13863 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13864 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
13865 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
13866 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13867 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13868 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13869 ; AVX2-FCP-NEXT: vmovaps 1008(%rdi), %xmm1
13870 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13871 ; AVX2-FCP-NEXT: vmovaps 960(%rdi), %xmm0
13872 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13873 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
13874 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm2[0],ymm4[2],ymm2[2]
13875 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,0,3]
13876 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
13877 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13878 ; AVX2-FCP-NEXT: vmovaps 1472(%rdi), %ymm0
13879 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13880 ; AVX2-FCP-NEXT: vmovaps 1440(%rdi), %ymm1
13881 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13882 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13883 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13884 ; AVX2-FCP-NEXT: vmovaps 1392(%rdi), %xmm2
13885 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13886 ; AVX2-FCP-NEXT: vmovaps 1344(%rdi), %xmm1
13887 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13888 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13889 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13890 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13891 ; AVX2-FCP-NEXT: vmovaps 1856(%rdi), %ymm0
13892 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13893 ; AVX2-FCP-NEXT: vmovaps 1824(%rdi), %ymm1
13894 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13895 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13896 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13897 ; AVX2-FCP-NEXT: vmovaps 1776(%rdi), %xmm2
13898 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13899 ; AVX2-FCP-NEXT: vmovaps 1728(%rdi), %xmm1
13900 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13901 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13902 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13903 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13904 ; AVX2-FCP-NEXT: vmovaps 2240(%rdi), %ymm0
13905 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13906 ; AVX2-FCP-NEXT: vmovaps 2208(%rdi), %ymm1
13907 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13908 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13909 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13910 ; AVX2-FCP-NEXT: vmovaps 2160(%rdi), %xmm2
13911 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13912 ; AVX2-FCP-NEXT: vmovaps 2112(%rdi), %xmm1
13913 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13914 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13915 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13916 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13917 ; AVX2-FCP-NEXT: vmovaps 2624(%rdi), %ymm0
13918 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13919 ; AVX2-FCP-NEXT: vmovaps 2592(%rdi), %ymm1
13920 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13921 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13922 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13923 ; AVX2-FCP-NEXT: vmovaps 2544(%rdi), %xmm2
13924 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13925 ; AVX2-FCP-NEXT: vmovaps 2496(%rdi), %xmm1
13926 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13927 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13928 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13929 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13930 ; AVX2-FCP-NEXT: vmovaps 3008(%rdi), %ymm0
13931 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13932 ; AVX2-FCP-NEXT: vmovaps 2976(%rdi), %ymm1
13933 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13934 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13935 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13936 ; AVX2-FCP-NEXT: vmovaps 2928(%rdi), %xmm2
13937 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13938 ; AVX2-FCP-NEXT: vmovaps 2880(%rdi), %xmm1
13939 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13940 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
13941 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13942 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13943 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm0
13944 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13945 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm1
13946 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13947 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13948 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13949 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm2
13950 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13951 ; AVX2-FCP-NEXT: vmovaps 48(%rdi), %xmm1
13952 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13953 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
13954 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13955 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13956 ; AVX2-FCP-NEXT: vmovaps 512(%rdi), %ymm0
13957 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13958 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm1
13959 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13960 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13961 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13962 ; AVX2-FCP-NEXT: vmovaps 432(%rdi), %xmm1
13963 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13964 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm13
13965 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
13966 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13967 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13968 ; AVX2-FCP-NEXT: vmovaps 896(%rdi), %ymm0
13969 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13970 ; AVX2-FCP-NEXT: vmovaps 864(%rdi), %ymm1
13971 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13972 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13973 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13974 ; AVX2-FCP-NEXT: vmovaps 816(%rdi), %xmm1
13975 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13976 ; AVX2-FCP-NEXT: vmovaps 768(%rdi), %xmm11
13977 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
13978 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13979 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13980 ; AVX2-FCP-NEXT: vmovaps 1280(%rdi), %ymm0
13981 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13982 ; AVX2-FCP-NEXT: vmovaps 1248(%rdi), %ymm1
13983 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13984 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13985 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13986 ; AVX2-FCP-NEXT: vmovaps 1200(%rdi), %xmm1
13987 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13988 ; AVX2-FCP-NEXT: vmovaps 1152(%rdi), %xmm9
13989 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
13990 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13991 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13992 ; AVX2-FCP-NEXT: vmovaps 1664(%rdi), %ymm0
13993 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13994 ; AVX2-FCP-NEXT: vmovaps 1632(%rdi), %ymm1
13995 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13996 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13997 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
13998 ; AVX2-FCP-NEXT: vmovaps 1584(%rdi), %xmm14
13999 ; AVX2-FCP-NEXT: vmovaps 1536(%rdi), %xmm7
14000 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm14[0]
14001 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14002 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14003 ; AVX2-FCP-NEXT: vmovaps 2048(%rdi), %ymm12
14004 ; AVX2-FCP-NEXT: vmovaps 2016(%rdi), %ymm0
14005 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14006 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm12[0],ymm0[2],ymm12[2]
14007 ; AVX2-FCP-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14008 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14009 ; AVX2-FCP-NEXT: vmovaps 1968(%rdi), %xmm10
14010 ; AVX2-FCP-NEXT: vmovaps 1920(%rdi), %xmm5
14011 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm10[0]
14012 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14013 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14014 ; AVX2-FCP-NEXT: vmovaps 2432(%rdi), %ymm8
14015 ; AVX2-FCP-NEXT: vmovaps 2400(%rdi), %ymm0
14016 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14017 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
14018 ; AVX2-FCP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14019 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14020 ; AVX2-FCP-NEXT: vmovaps 2352(%rdi), %xmm6
14021 ; AVX2-FCP-NEXT: vmovaps 2304(%rdi), %xmm3
14022 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm6[0]
14023 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14024 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14025 ; AVX2-FCP-NEXT: vmovaps 2816(%rdi), %ymm4
14026 ; AVX2-FCP-NEXT: vmovaps 2784(%rdi), %ymm0
14027 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14028 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
14029 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14030 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14031 ; AVX2-FCP-NEXT: vmovaps 2736(%rdi), %xmm2
14032 ; AVX2-FCP-NEXT: vmovaps 2688(%rdi), %xmm1
14033 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
14034 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14035 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14036 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14037 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14038 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14039 ; AVX2-FCP-NEXT: vbroadcastsd 296(%rdi), %ymm15
14040 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14041 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14042 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14043 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14044 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14045 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14046 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14047 ; AVX2-FCP-NEXT: vbroadcastsd 680(%rdi), %ymm15
14048 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14049 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14050 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14051 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14052 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14053 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14054 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14055 ; AVX2-FCP-NEXT: vbroadcastsd 1064(%rdi), %ymm15
14056 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14057 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14058 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14059 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14060 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14061 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14062 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14063 ; AVX2-FCP-NEXT: vbroadcastsd 1448(%rdi), %ymm15
14064 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14065 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14066 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14067 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14068 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14069 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14070 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14071 ; AVX2-FCP-NEXT: vbroadcastsd 1832(%rdi), %ymm15
14072 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14073 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14074 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14075 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14076 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14077 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14078 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14079 ; AVX2-FCP-NEXT: vbroadcastsd 2216(%rdi), %ymm15
14080 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14081 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14082 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14083 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14084 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14085 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14086 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14087 ; AVX2-FCP-NEXT: vbroadcastsd 2600(%rdi), %ymm15
14088 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14089 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14090 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14091 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14092 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14093 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14094 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14095 ; AVX2-FCP-NEXT: vbroadcastsd 2984(%rdi), %ymm15
14096 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14097 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14098 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14099 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14100 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14101 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14102 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14103 ; AVX2-FCP-NEXT: vbroadcastsd 104(%rdi), %ymm15
14104 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
14105 ; AVX2-FCP-NEXT: # ymm15 = ymm15[1],mem[1],ymm15[3],mem[3]
14106 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
14107 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14108 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm0 # 16-byte Folded Reload
14109 ; AVX2-FCP-NEXT: # xmm0 = xmm13[1],mem[1]
14110 ; AVX2-FCP-NEXT: vbroadcastsd 488(%rdi), %ymm13
14111 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
14112 ; AVX2-FCP-NEXT: # ymm13 = ymm13[1],mem[1],ymm13[3],mem[3]
14113 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm13[4,5,6,7]
14114 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14115 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm0 # 16-byte Folded Reload
14116 ; AVX2-FCP-NEXT: # xmm0 = xmm11[1],mem[1]
14117 ; AVX2-FCP-NEXT: vbroadcastsd 872(%rdi), %ymm11
14118 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
14119 ; AVX2-FCP-NEXT: # ymm11 = ymm11[1],mem[1],ymm11[3],mem[3]
14120 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
14121 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14122 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm0 # 16-byte Folded Reload
14123 ; AVX2-FCP-NEXT: # xmm0 = xmm9[1],mem[1]
14124 ; AVX2-FCP-NEXT: vbroadcastsd 1256(%rdi), %ymm9
14125 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
14126 ; AVX2-FCP-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
14127 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
14128 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14129 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm14[1]
14130 ; AVX2-FCP-NEXT: vbroadcastsd 1640(%rdi), %ymm7
14131 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
14132 ; AVX2-FCP-NEXT: # ymm7 = ymm7[1],mem[1],ymm7[3],mem[3]
14133 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
14134 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14135 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm10[1]
14136 ; AVX2-FCP-NEXT: vbroadcastsd 2024(%rdi), %ymm5
14137 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
14138 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
14139 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14140 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm6[1]
14141 ; AVX2-FCP-NEXT: vbroadcastsd 2408(%rdi), %ymm3
14142 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm8[1],ymm3[3],ymm8[3]
14143 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
14144 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14145 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
14146 ; AVX2-FCP-NEXT: vbroadcastsd 2792(%rdi), %ymm1
14147 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
14148 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14149 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14150 ; AVX2-FCP-NEXT: vbroadcastsd 352(%rdi), %ymm0
14151 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14152 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14153 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm2
14154 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14155 ; AVX2-FCP-NEXT: vmovaps 208(%rdi), %xmm1
14156 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14157 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14158 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14159 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14160 ; AVX2-FCP-NEXT: vbroadcastsd 736(%rdi), %ymm0
14161 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14162 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14163 ; AVX2-FCP-NEXT: vmovaps 640(%rdi), %xmm2
14164 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14165 ; AVX2-FCP-NEXT: vmovaps 592(%rdi), %xmm1
14166 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14167 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14168 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14169 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14170 ; AVX2-FCP-NEXT: vbroadcastsd 1120(%rdi), %ymm0
14171 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14172 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14173 ; AVX2-FCP-NEXT: vmovaps 1024(%rdi), %xmm2
14174 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14175 ; AVX2-FCP-NEXT: vmovaps 976(%rdi), %xmm1
14176 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14177 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14178 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14179 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14180 ; AVX2-FCP-NEXT: vbroadcastsd 1504(%rdi), %ymm0
14181 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14182 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14183 ; AVX2-FCP-NEXT: vmovaps 1408(%rdi), %xmm2
14184 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14185 ; AVX2-FCP-NEXT: vmovaps 1360(%rdi), %xmm1
14186 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14187 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14188 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14189 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14190 ; AVX2-FCP-NEXT: vbroadcastsd 1888(%rdi), %ymm0
14191 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14192 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14193 ; AVX2-FCP-NEXT: vmovaps 1792(%rdi), %xmm2
14194 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14195 ; AVX2-FCP-NEXT: vmovaps 1744(%rdi), %xmm1
14196 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14197 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14198 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14199 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14200 ; AVX2-FCP-NEXT: vbroadcastsd 2272(%rdi), %ymm0
14201 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14202 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14203 ; AVX2-FCP-NEXT: vmovaps 2176(%rdi), %xmm2
14204 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14205 ; AVX2-FCP-NEXT: vmovaps 2128(%rdi), %xmm1
14206 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14207 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14208 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14209 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14210 ; AVX2-FCP-NEXT: vbroadcastsd 2656(%rdi), %ymm0
14211 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14212 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14213 ; AVX2-FCP-NEXT: vmovaps 2560(%rdi), %xmm2
14214 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14215 ; AVX2-FCP-NEXT: vmovaps 2512(%rdi), %xmm1
14216 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14217 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14218 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14219 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14220 ; AVX2-FCP-NEXT: vbroadcastsd 3040(%rdi), %ymm0
14221 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14222 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14223 ; AVX2-FCP-NEXT: vmovaps 2944(%rdi), %xmm2
14224 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14225 ; AVX2-FCP-NEXT: vmovaps 2896(%rdi), %xmm1
14226 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14227 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14228 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14229 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14230 ; AVX2-FCP-NEXT: vbroadcastsd 2848(%rdi), %ymm0
14231 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14232 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14233 ; AVX2-FCP-NEXT: vmovaps 2752(%rdi), %xmm2
14234 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14235 ; AVX2-FCP-NEXT: vmovaps 2704(%rdi), %xmm1
14236 ; AVX2-FCP-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
14237 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14238 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14239 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14240 ; AVX2-FCP-NEXT: vbroadcastsd 2464(%rdi), %ymm0
14241 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14242 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14243 ; AVX2-FCP-NEXT: vmovaps 2368(%rdi), %xmm2
14244 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14245 ; AVX2-FCP-NEXT: vmovaps 2320(%rdi), %xmm1
14246 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14247 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14248 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14249 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14250 ; AVX2-FCP-NEXT: vbroadcastsd 2080(%rdi), %ymm0
14251 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14252 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14253 ; AVX2-FCP-NEXT: vmovaps 1984(%rdi), %xmm2
14254 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14255 ; AVX2-FCP-NEXT: vmovaps 1936(%rdi), %xmm1
14256 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14257 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
14258 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14259 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14260 ; AVX2-FCP-NEXT: vbroadcastsd 1696(%rdi), %ymm0
14261 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14262 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14263 ; AVX2-FCP-NEXT: vmovaps 1600(%rdi), %xmm12
14264 ; AVX2-FCP-NEXT: vmovaps 1552(%rdi), %xmm11
14265 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm12[0]
14266 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14267 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14268 ; AVX2-FCP-NEXT: vbroadcastsd 1312(%rdi), %ymm0
14269 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
14270 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm10[0],ymm0[0],ymm10[2],ymm0[2]
14271 ; AVX2-FCP-NEXT: vmovaps 1216(%rdi), %xmm9
14272 ; AVX2-FCP-NEXT: vmovaps 1168(%rdi), %xmm8
14273 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm8[0],xmm9[0]
14274 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14275 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14276 ; AVX2-FCP-NEXT: vbroadcastsd 928(%rdi), %ymm0
14277 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
14278 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[2]
14279 ; AVX2-FCP-NEXT: vmovaps 832(%rdi), %xmm6
14280 ; AVX2-FCP-NEXT: vmovaps 784(%rdi), %xmm5
14281 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm13 = xmm5[0],xmm6[0]
14282 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
14283 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14284 ; AVX2-FCP-NEXT: vbroadcastsd 544(%rdi), %ymm0
14285 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
14286 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm4[0],ymm0[0],ymm4[2],ymm0[2]
14287 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm13
14288 ; AVX2-FCP-NEXT: vmovaps 400(%rdi), %xmm3
14289 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm3[0],xmm13[0]
14290 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14291 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14292 ; AVX2-FCP-NEXT: vbroadcastsd 160(%rdi), %ymm0
14293 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
14294 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
14295 ; AVX2-FCP-NEXT: vmovaps 16(%rdi), %xmm15
14296 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm0
14297 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm14 = xmm15[0],xmm0[0]
14298 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
14299 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14300 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
14301 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm14
14302 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm14[1],ymm2[3],ymm14[3]
14303 ; AVX2-FCP-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14304 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14305 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14306 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14307 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14308 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14309 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14310 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm2
14311 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14312 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14313 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm15
14314 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14315 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14316 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14317 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14318 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm13[1]
14319 ; AVX2-FCP-NEXT: vmovaps 544(%rdi), %ymm2
14320 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm2[1],ymm4[3],ymm2[3]
14321 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm3
14322 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14323 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14324 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14325 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14326 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14327 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14328 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14329 ; AVX2-FCP-NEXT: vmovaps 736(%rdi), %ymm2
14330 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14331 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14332 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm4
14333 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14334 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14335 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14336 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14337 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm6[1]
14338 ; AVX2-FCP-NEXT: vmovaps 928(%rdi), %ymm2
14339 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm7[1],ymm2[1],ymm7[3],ymm2[3]
14340 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm5
14341 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14342 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14343 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14344 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14345 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14346 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14347 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14348 ; AVX2-FCP-NEXT: vmovaps 1120(%rdi), %ymm2
14349 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14350 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14351 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm6
14352 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14353 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14354 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14355 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14356 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm8[1],xmm9[1]
14357 ; AVX2-FCP-NEXT: vmovaps 1312(%rdi), %ymm2
14358 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm10[1],ymm2[1],ymm10[3],ymm2[3]
14359 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm7
14360 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14361 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14362 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14363 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14364 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14365 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14366 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14367 ; AVX2-FCP-NEXT: vmovaps 1504(%rdi), %ymm2
14368 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14369 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14370 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm8
14371 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14372 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14373 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14374 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14375 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm12[1]
14376 ; AVX2-FCP-NEXT: vmovaps 1696(%rdi), %ymm2
14377 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14378 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14379 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm9
14380 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14381 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14382 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14383 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14384 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14385 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14386 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14387 ; AVX2-FCP-NEXT: vmovaps 1888(%rdi), %ymm2
14388 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14389 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14390 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm10
14391 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14392 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14393 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14394 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14395 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14396 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14397 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14398 ; AVX2-FCP-NEXT: vmovaps 2080(%rdi), %ymm2
14399 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14400 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14401 ; AVX2-FCP-NEXT: vmovaps %ymm2, %ymm11
14402 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14403 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14404 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14405 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14406 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14407 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14408 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14409 ; AVX2-FCP-NEXT: vmovaps 2272(%rdi), %ymm2
14410 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14411 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14412 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14413 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14414 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14415 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14416 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14417 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14418 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14419 ; AVX2-FCP-NEXT: vmovaps 2464(%rdi), %ymm2
14420 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14421 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14422 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14423 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14424 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14425 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14426 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14427 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14428 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14429 ; AVX2-FCP-NEXT: vmovaps 2656(%rdi), %ymm2
14430 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14431 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14432 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14433 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14434 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14435 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14436 ; AVX2-FCP-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
14437 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14438 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14439 ; AVX2-FCP-NEXT: vmovaps 2848(%rdi), %ymm2
14440 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14441 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14442 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14443 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14444 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14445 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14446 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14447 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14448 ; AVX2-FCP-NEXT: # xmm0 = xmm0[1],mem[1]
14449 ; AVX2-FCP-NEXT: vmovaps 3040(%rdi), %ymm2
14450 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14451 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
14452 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14453 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,2,1]
14454 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
14455 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14456 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14457 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
14458 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14459 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
14460 ; AVX2-FCP-NEXT: vmovaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14461 ; AVX2-FCP-NEXT: vmovaps 80(%rdi), %xmm1
14462 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14463 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm12[0],xmm1[0]
14464 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14465 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14466 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14467 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
14468 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14469 ; AVX2-FCP-NEXT: vmovaps 272(%rdi), %xmm12
14470 ; AVX2-FCP-NEXT: vmovaps %xmm12, (%rsp) # 16-byte Spill
14471 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm1
14472 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14473 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
14474 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14475 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14476 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14477 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
14478 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14479 ; AVX2-FCP-NEXT: vmovaps 464(%rdi), %xmm3
14480 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14481 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm1
14482 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14483 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14484 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14485 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14486 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14487 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
14488 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14489 ; AVX2-FCP-NEXT: vmovaps 656(%rdi), %xmm3
14490 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14491 ; AVX2-FCP-NEXT: vmovaps 608(%rdi), %xmm1
14492 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14493 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14494 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14495 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14496 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14497 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
14498 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14499 ; AVX2-FCP-NEXT: vmovaps 848(%rdi), %xmm3
14500 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14501 ; AVX2-FCP-NEXT: vmovaps 800(%rdi), %xmm1
14502 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14503 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14504 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14505 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14506 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14507 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
14508 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14509 ; AVX2-FCP-NEXT: vmovaps 1040(%rdi), %xmm3
14510 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14511 ; AVX2-FCP-NEXT: vmovaps 992(%rdi), %xmm1
14512 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14513 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14514 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14515 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14516 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14517 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
14518 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14519 ; AVX2-FCP-NEXT: vmovaps 1232(%rdi), %xmm3
14520 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14521 ; AVX2-FCP-NEXT: vmovaps 1184(%rdi), %xmm1
14522 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14523 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14524 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14525 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14526 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14527 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
14528 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14529 ; AVX2-FCP-NEXT: vmovaps 1424(%rdi), %xmm3
14530 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14531 ; AVX2-FCP-NEXT: vmovaps 1376(%rdi), %xmm1
14532 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14533 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14534 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14535 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14536 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14537 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
14538 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14539 ; AVX2-FCP-NEXT: vmovaps 1616(%rdi), %xmm3
14540 ; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14541 ; AVX2-FCP-NEXT: vmovaps 1568(%rdi), %xmm1
14542 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14543 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
14544 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14545 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14546 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14547 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
14548 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14549 ; AVX2-FCP-NEXT: vmovaps 1808(%rdi), %xmm1
14550 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14551 ; AVX2-FCP-NEXT: vmovaps 1760(%rdi), %xmm13
14552 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm13[0],xmm1[0]
14553 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14554 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14555 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14556 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[2]
14557 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14558 ; AVX2-FCP-NEXT: vmovaps 2000(%rdi), %xmm1
14559 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14560 ; AVX2-FCP-NEXT: vmovaps 1952(%rdi), %xmm11
14561 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm11[0],xmm1[0]
14562 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14563 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14564 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14565 ; AVX2-FCP-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14566 ; AVX2-FCP-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
14567 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14568 ; AVX2-FCP-NEXT: vmovaps 2192(%rdi), %xmm1
14569 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
14570 ; AVX2-FCP-NEXT: vmovaps 2144(%rdi), %xmm9
14571 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm9[0],xmm1[0]
14572 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14573 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14574 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14575 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
14576 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
14577 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14578 ; AVX2-FCP-NEXT: vmovaps 2384(%rdi), %xmm12
14579 ; AVX2-FCP-NEXT: vmovaps 2336(%rdi), %xmm7
14580 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm12[0]
14581 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14582 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14583 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14584 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
14585 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
14586 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14587 ; AVX2-FCP-NEXT: vmovaps 2576(%rdi), %xmm8
14588 ; AVX2-FCP-NEXT: vmovaps 2528(%rdi), %xmm5
14589 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm8[0]
14590 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14591 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14592 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14593 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
14594 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[2],ymm6[2]
14595 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14596 ; AVX2-FCP-NEXT: vmovaps 2768(%rdi), %xmm4
14597 ; AVX2-FCP-NEXT: vmovaps 2720(%rdi), %xmm3
14598 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm4[0]
14599 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14600 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14601 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14602 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
14603 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,0,3]
14604 ; AVX2-FCP-NEXT: vmovaps 2960(%rdi), %xmm2
14605 ; AVX2-FCP-NEXT: vmovaps 2912(%rdi), %xmm1
14606 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm15 = xmm1[0],xmm2[0]
14607 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14608 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14609 ; AVX2-FCP-NEXT: vbroadcastsd 136(%rdi), %ymm0
14610 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14611 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14612 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14613 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14614 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14615 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14616 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14617 ; AVX2-FCP-NEXT: vbroadcastsd 328(%rdi), %ymm0
14618 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14619 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14620 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14621 ; AVX2-FCP-NEXT: vunpckhpd (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
14622 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14623 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14624 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14625 ; AVX2-FCP-NEXT: vbroadcastsd 520(%rdi), %ymm0
14626 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14627 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14628 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14629 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14630 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14631 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14632 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14633 ; AVX2-FCP-NEXT: vbroadcastsd 712(%rdi), %ymm0
14634 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14635 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14636 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14637 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14638 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14639 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14640 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14641 ; AVX2-FCP-NEXT: vbroadcastsd 904(%rdi), %ymm0
14642 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14643 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14644 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14645 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14646 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14647 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14648 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14649 ; AVX2-FCP-NEXT: vbroadcastsd 1096(%rdi), %ymm0
14650 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14651 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14652 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14653 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14654 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14655 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14656 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14657 ; AVX2-FCP-NEXT: vbroadcastsd 1288(%rdi), %ymm0
14658 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14659 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14660 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14661 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14662 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14663 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14664 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14665 ; AVX2-FCP-NEXT: vbroadcastsd 1480(%rdi), %ymm0
14666 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14667 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14668 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14669 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14670 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14671 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14672 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14673 ; AVX2-FCP-NEXT: vbroadcastsd 1672(%rdi), %ymm0
14674 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14675 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14676 ; AVX2-FCP-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
14677 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
14678 ; AVX2-FCP-NEXT: # xmm15 = xmm15[1],mem[1]
14679 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm0[4,5,6,7]
14680 ; AVX2-FCP-NEXT: vbroadcastsd 1864(%rdi), %ymm0
14681 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14682 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14683 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
14684 ; AVX2-FCP-NEXT: # xmm13 = xmm13[1],mem[1]
14685 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm0[4,5,6,7]
14686 ; AVX2-FCP-NEXT: vbroadcastsd 2056(%rdi), %ymm0
14687 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14688 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14689 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm11 # 16-byte Folded Reload
14690 ; AVX2-FCP-NEXT: # xmm11 = xmm11[1],mem[1]
14691 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm0[4,5,6,7]
14692 ; AVX2-FCP-NEXT: vbroadcastsd 2248(%rdi), %ymm0
14693 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14694 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14695 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
14696 ; AVX2-FCP-NEXT: # xmm9 = xmm9[1],mem[1]
14697 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm0[4,5,6,7]
14698 ; AVX2-FCP-NEXT: vbroadcastsd 2440(%rdi), %ymm0
14699 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm14[1],ymm0[3],ymm14[3]
14700 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm12[1]
14701 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
14702 ; AVX2-FCP-NEXT: vbroadcastsd 2632(%rdi), %ymm0
14703 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm10[1],ymm0[3],ymm10[3]
14704 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
14705 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
14706 ; AVX2-FCP-NEXT: vbroadcastsd 2824(%rdi), %ymm0
14707 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm6[1],ymm0[3],ymm6[3]
14708 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
14709 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm0[4,5,6,7]
14710 ; AVX2-FCP-NEXT: vbroadcastsd 3016(%rdi), %ymm0
14711 ; AVX2-FCP-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14712 ; AVX2-FCP-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
14713 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
14714 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
14715 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14716 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rsi)
14717 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14718 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rsi)
14719 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14720 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rsi)
14721 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14722 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rsi)
14723 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14724 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rsi)
14725 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14726 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rsi)
14727 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14728 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rsi)
14729 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14730 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rsi)
14731 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14732 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rsi)
14733 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14734 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rsi)
14735 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14736 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rsi)
14737 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14738 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rsi)
14739 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14740 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rsi)
14741 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14742 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rsi)
14743 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14744 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rsi)
14745 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14746 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rsi)
14747 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14748 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rdx)
14749 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14750 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rdx)
14751 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14752 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rdx)
14753 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14754 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rdx)
14755 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14756 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rdx)
14757 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14758 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rdx)
14759 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14760 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rdx)
14761 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14762 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rdx)
14763 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14764 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rdx)
14765 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14766 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rdx)
14767 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14768 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rdx)
14769 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14770 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rdx)
14771 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14772 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rdx)
14773 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14774 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rdx)
14775 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14776 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rdx)
14777 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14778 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rdx)
14779 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14780 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%rcx)
14781 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14782 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rcx)
14783 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14784 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rcx)
14785 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14786 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%rcx)
14787 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14788 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%rcx)
14789 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14790 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%rcx)
14791 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14792 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%rcx)
14793 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14794 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%rcx)
14795 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14796 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%rcx)
14797 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14798 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%rcx)
14799 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14800 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%rcx)
14801 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14802 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%rcx)
14803 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14804 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%rcx)
14805 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14806 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%rcx)
14807 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14808 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%rcx)
14809 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14810 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rcx)
14811 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14812 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%r8)
14813 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14814 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%r8)
14815 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14816 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%r8)
14817 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14818 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%r8)
14819 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14820 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%r8)
14821 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14822 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%r8)
14823 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14824 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%r8)
14825 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14826 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%r8)
14827 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14828 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r8)
14829 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14830 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r8)
14831 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14832 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r8)
14833 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14834 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r8)
14835 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14836 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r8)
14837 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14838 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r8)
14839 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14840 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r8)
14841 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14842 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r8)
14843 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14844 ; AVX2-FCP-NEXT: vmovaps %ymm1, 480(%r9)
14845 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14846 ; AVX2-FCP-NEXT: vmovaps %ymm1, 448(%r9)
14847 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14848 ; AVX2-FCP-NEXT: vmovaps %ymm1, 416(%r9)
14849 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14850 ; AVX2-FCP-NEXT: vmovaps %ymm1, 384(%r9)
14851 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14852 ; AVX2-FCP-NEXT: vmovaps %ymm1, 352(%r9)
14853 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14854 ; AVX2-FCP-NEXT: vmovaps %ymm1, 320(%r9)
14855 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14856 ; AVX2-FCP-NEXT: vmovaps %ymm1, 288(%r9)
14857 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14858 ; AVX2-FCP-NEXT: vmovaps %ymm1, 256(%r9)
14859 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14860 ; AVX2-FCP-NEXT: vmovaps %ymm1, 224(%r9)
14861 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14862 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r9)
14863 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14864 ; AVX2-FCP-NEXT: vmovaps %ymm1, 160(%r9)
14865 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14866 ; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%r9)
14867 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14868 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r9)
14869 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14870 ; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%r9)
14871 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14872 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r9)
14873 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
14874 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r9)
14875 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
14876 ; AVX2-FCP-NEXT: vmovaps %ymm0, 480(%rax)
14877 ; AVX2-FCP-NEXT: vmovaps %ymm3, 448(%rax)
14878 ; AVX2-FCP-NEXT: vmovaps %ymm5, 416(%rax)
14879 ; AVX2-FCP-NEXT: vmovaps %ymm7, 384(%rax)
14880 ; AVX2-FCP-NEXT: vmovaps %ymm9, 352(%rax)
14881 ; AVX2-FCP-NEXT: vmovaps %ymm11, 320(%rax)
14882 ; AVX2-FCP-NEXT: vmovaps %ymm13, 288(%rax)
14883 ; AVX2-FCP-NEXT: vmovaps %ymm15, 256(%rax)
14884 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14885 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
14886 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14887 ; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%rax)
14888 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14889 ; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%rax)
14890 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14891 ; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
14892 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14893 ; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%rax)
14894 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14895 ; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
14896 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14897 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
14898 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14899 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
14900 ; AVX2-FCP-NEXT: addq $3432, %rsp # imm = 0xD68
14901 ; AVX2-FCP-NEXT: vzeroupper
14902 ; AVX2-FCP-NEXT: retq
14904 ; AVX512-LABEL: load_i64_stride6_vf64:
14906 ; AVX512-NEXT: subq $7240, %rsp # imm = 0x1C48
14907 ; AVX512-NEXT: vmovdqa64 2048(%rdi), %zmm3
14908 ; AVX512-NEXT: vmovdqa64 1280(%rdi), %zmm4
14909 ; AVX512-NEXT: vmovdqa64 1344(%rdi), %zmm0
14910 ; AVX512-NEXT: vmovdqa64 896(%rdi), %zmm5
14911 ; AVX512-NEXT: vmovdqa64 960(%rdi), %zmm26
14912 ; AVX512-NEXT: vmovdqa64 512(%rdi), %zmm2
14913 ; AVX512-NEXT: vmovdqa64 576(%rdi), %zmm1
14914 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm6
14915 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm29
14916 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
14917 ; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
14918 ; AVX512-NEXT: vmovdqa64 %zmm29, %zmm8
14919 ; AVX512-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
14920 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14921 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm8
14922 ; AVX512-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
14923 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14924 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm8
14925 ; AVX512-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
14926 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14927 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
14928 ; AVX512-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
14929 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14930 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
14931 ; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
14932 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm9
14933 ; AVX512-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
14934 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14935 ; AVX512-NEXT: vmovdqa64 %zmm29, %zmm9
14936 ; AVX512-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
14937 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14938 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9
14939 ; AVX512-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
14940 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14941 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm9
14942 ; AVX512-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
14943 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14944 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
14945 ; AVX512-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14946 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm10
14947 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
14948 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14949 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm10
14950 ; AVX512-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
14951 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14952 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm10
14953 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
14954 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14955 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm10
14956 ; AVX512-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
14957 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14958 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
14959 ; AVX512-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14960 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm11
14961 ; AVX512-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
14962 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14963 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm11
14964 ; AVX512-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
14965 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14966 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm11
14967 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
14968 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14969 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm11
14970 ; AVX512-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
14971 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14972 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
14973 ; AVX512-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
14974 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm13
14975 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm12
14976 ; AVX512-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
14977 ; AVX512-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14978 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
14979 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14980 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
14981 ; AVX512-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14982 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm1
14983 ; AVX512-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
14984 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14985 ; AVX512-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
14986 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14987 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm1
14988 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
14989 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14990 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
14991 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14992 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
14993 ; AVX512-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
14994 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14995 ; AVX512-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
14996 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14997 ; AVX512-NEXT: vmovdqa64 2112(%rdi), %zmm0
14998 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
14999 ; AVX512-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15000 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15001 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
15002 ; AVX512-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
15003 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15004 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
15005 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
15006 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15007 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
15008 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
15009 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15010 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1
15011 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
15012 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15013 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15014 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15015 ; AVX512-NEXT: vmovdqa64 1664(%rdi), %zmm1
15016 ; AVX512-NEXT: vmovdqa64 1728(%rdi), %zmm0
15017 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm3
15018 ; AVX512-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
15019 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15020 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm3
15021 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15022 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15023 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15024 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
15025 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15026 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15027 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
15028 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15029 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15030 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
15031 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15032 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
15033 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15034 ; AVX512-NEXT: vmovdqa64 2432(%rdi), %zmm3
15035 ; AVX512-NEXT: vmovdqa64 2496(%rdi), %zmm0
15036 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1
15037 ; AVX512-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15038 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15039 ; AVX512-NEXT: vmovdqa64 2816(%rdi), %zmm4
15040 ; AVX512-NEXT: vmovdqa64 2880(%rdi), %zmm1
15041 ; AVX512-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
15042 ; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15043 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5
15044 ; AVX512-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
15045 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15046 ; AVX512-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
15047 ; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15048 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15049 ; AVX512-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
15050 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15051 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
15052 ; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15053 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15054 ; AVX512-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
15055 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15056 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
15057 ; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15058 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm5
15059 ; AVX512-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
15060 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15061 ; AVX512-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
15062 ; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15063 ; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
15064 ; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15065 ; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15066 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15067 ; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm1
15068 ; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm2
15069 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
15070 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15071 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
15072 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15073 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
15074 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15075 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
15076 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15077 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
15078 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
15079 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
15080 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15081 ; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
15082 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
15083 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
15084 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15085 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
15086 ; AVX512-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15087 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm0
15088 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
15089 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15090 ; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
15091 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15092 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15093 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15094 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm2
15095 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
15096 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15097 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15098 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15099 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15100 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15101 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15102 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15103 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15104 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15105 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15106 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15107 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15108 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15109 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15110 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15111 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15112 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15113 ; AVX512-NEXT: vmovdqa64 1216(%rdi), %zmm1
15114 ; AVX512-NEXT: vmovdqa64 1152(%rdi), %zmm2
15115 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15116 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15117 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15118 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15119 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15120 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15121 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15122 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15123 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15124 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15125 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15126 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15127 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15128 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15129 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15130 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15131 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15132 ; AVX512-NEXT: vmovdqa64 832(%rdi), %zmm1
15133 ; AVX512-NEXT: vmovdqa64 768(%rdi), %zmm2
15134 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15135 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15136 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15137 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15138 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15139 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15140 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15141 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15142 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15143 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15144 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15145 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15146 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15147 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15148 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15149 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15150 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15151 ; AVX512-NEXT: vmovdqa64 1984(%rdi), %zmm1
15152 ; AVX512-NEXT: vmovdqa64 1920(%rdi), %zmm2
15153 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15154 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15155 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15156 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15157 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15158 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15159 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15160 ; AVX512-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15161 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15162 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15163 ; AVX512-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15164 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15165 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm3
15166 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15167 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15168 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15169 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15170 ; AVX512-NEXT: vmovdqa64 1600(%rdi), %zmm1
15171 ; AVX512-NEXT: vmovdqa64 1536(%rdi), %zmm30
15172 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15173 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
15174 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15175 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15176 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
15177 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15178 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm2
15179 ; AVX512-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
15180 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15181 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm2
15182 ; AVX512-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
15183 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15184 ; AVX512-NEXT: vmovdqa64 %zmm30, %zmm2
15185 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
15186 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15187 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
15188 ; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm26
15189 ; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm18
15190 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm1
15191 ; AVX512-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
15192 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15193 ; AVX512-NEXT: vmovdqa64 704(%rdi), %zmm21
15194 ; AVX512-NEXT: vmovdqa64 640(%rdi), %zmm13
15195 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm1
15196 ; AVX512-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
15197 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15198 ; AVX512-NEXT: vmovdqa64 1088(%rdi), %zmm24
15199 ; AVX512-NEXT: vmovdqa64 1024(%rdi), %zmm19
15200 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm1
15201 ; AVX512-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
15202 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15203 ; AVX512-NEXT: vmovdqa64 1472(%rdi), %zmm28
15204 ; AVX512-NEXT: vmovdqa64 1408(%rdi), %zmm16
15205 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm1
15206 ; AVX512-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
15207 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15208 ; AVX512-NEXT: vmovdqa64 1856(%rdi), %zmm25
15209 ; AVX512-NEXT: vmovdqa64 1792(%rdi), %zmm7
15210 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1
15211 ; AVX512-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
15212 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15213 ; AVX512-NEXT: vmovdqa64 2240(%rdi), %zmm23
15214 ; AVX512-NEXT: vmovdqa64 2176(%rdi), %zmm17
15215 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm1
15216 ; AVX512-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
15217 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15218 ; AVX512-NEXT: vmovdqa64 2624(%rdi), %zmm20
15219 ; AVX512-NEXT: vmovdqa64 2560(%rdi), %zmm5
15220 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
15221 ; AVX512-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
15222 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15223 ; AVX512-NEXT: vmovdqa64 2368(%rdi), %zmm1
15224 ; AVX512-NEXT: vmovdqa64 2304(%rdi), %zmm22
15225 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm29
15226 ; AVX512-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
15227 ; AVX512-NEXT: vmovdqa64 3008(%rdi), %zmm14
15228 ; AVX512-NEXT: vmovdqa64 2944(%rdi), %zmm31
15229 ; AVX512-NEXT: vmovdqa64 %zmm31, %zmm2
15230 ; AVX512-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
15231 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15232 ; AVX512-NEXT: vmovdqa64 2752(%rdi), %zmm2
15233 ; AVX512-NEXT: vmovdqa64 2688(%rdi), %zmm12
15234 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
15235 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm3
15236 ; AVX512-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15237 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15238 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
15239 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15240 ; AVX512-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
15241 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15242 ; AVX512-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
15243 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm3
15244 ; AVX512-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
15245 ; AVX512-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
15246 ; AVX512-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
15247 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm3
15248 ; AVX512-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15249 ; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15250 ; AVX512-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
15251 ; AVX512-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
15252 ; AVX512-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
15253 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
15254 ; AVX512-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
15255 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15256 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm1
15257 ; AVX512-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
15258 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15259 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm1
15260 ; AVX512-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
15261 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15262 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm1
15263 ; AVX512-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
15264 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15265 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm1
15266 ; AVX512-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
15267 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15268 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm1
15269 ; AVX512-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
15270 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15271 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1
15272 ; AVX512-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
15273 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15274 ; AVX512-NEXT: vmovdqa64 %zmm31, %zmm1
15275 ; AVX512-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
15276 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15277 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
15278 ; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
15279 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm0
15280 ; AVX512-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
15281 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15282 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
15283 ; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
15284 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm0
15285 ; AVX512-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
15286 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15287 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
15288 ; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
15289 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm0
15290 ; AVX512-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
15291 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15292 ; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
15293 ; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
15294 ; AVX512-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
15295 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm6
15296 ; AVX512-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
15297 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15298 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm6
15299 ; AVX512-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
15300 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15301 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm6
15302 ; AVX512-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
15303 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15304 ; AVX512-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
15305 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm27
15306 ; AVX512-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
15307 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm6
15308 ; AVX512-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
15309 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15310 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm6
15311 ; AVX512-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
15312 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15313 ; AVX512-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
15314 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm18
15315 ; AVX512-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
15316 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm6
15317 ; AVX512-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
15318 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15319 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm6
15320 ; AVX512-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
15321 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15322 ; AVX512-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
15323 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm15
15324 ; AVX512-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
15325 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm19
15326 ; AVX512-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
15327 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm6
15328 ; AVX512-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
15329 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15330 ; AVX512-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
15331 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm13
15332 ; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
15333 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm17
15334 ; AVX512-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
15335 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm6
15336 ; AVX512-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
15337 ; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15338 ; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
15339 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm6
15340 ; AVX512-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
15341 ; AVX512-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
15342 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm7
15343 ; AVX512-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
15344 ; AVX512-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
15345 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm16
15346 ; AVX512-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
15347 ; AVX512-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
15348 ; AVX512-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
15349 ; AVX512-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
15350 ; AVX512-NEXT: movb $56, %al
15351 ; AVX512-NEXT: kmovw %eax, %k1
15352 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15353 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15354 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15355 ; AVX512-NEXT: movb $-64, %al
15356 ; AVX512-NEXT: kmovw %eax, %k2
15357 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15358 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15359 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15360 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15361 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15362 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15363 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15364 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15365 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15366 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15367 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15368 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15369 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15370 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15371 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15372 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15373 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15374 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15375 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15376 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15377 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15378 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15379 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15380 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15381 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15382 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15383 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15384 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15385 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15386 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15387 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15388 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15389 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15390 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15391 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
15392 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15393 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
15394 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15395 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
15396 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15397 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
15398 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15399 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15400 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15401 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15402 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15403 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15404 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15405 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15406 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15407 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15408 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15409 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15410 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15411 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15412 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15413 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15414 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15415 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15416 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15417 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15418 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15419 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15420 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15421 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15422 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15423 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15424 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15425 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15426 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15427 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15428 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15429 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15430 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15431 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15432 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15433 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15434 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15435 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15436 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15437 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15438 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15439 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15440 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15441 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
15442 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15443 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
15444 ; AVX512-NEXT: movb $24, %al
15445 ; AVX512-NEXT: kmovw %eax, %k2
15446 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15447 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15448 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15449 ; AVX512-NEXT: movb $-32, %al
15450 ; AVX512-NEXT: kmovw %eax, %k1
15451 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
15452 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15453 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15454 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15455 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15456 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15457 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15458 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15459 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15460 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15461 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15462 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15463 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
15464 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15465 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15466 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15467 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15468 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
15469 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15470 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15471 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15472 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15473 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
15474 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15475 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15476 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15477 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15478 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
15479 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15480 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15481 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15482 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
15483 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
15484 ; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15485 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15486 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
15487 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
15488 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15489 ; AVX512-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
15490 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15491 ; AVX512-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
15492 ; AVX512-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
15493 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15494 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15495 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15496 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15497 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15498 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15499 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15500 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15501 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15502 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15503 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15504 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15505 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15506 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15507 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15508 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15509 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15510 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15511 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15512 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15513 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
15514 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15515 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
15516 ; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15517 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15518 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
15519 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
15520 ; AVX512-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
15521 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15522 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
15523 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
15524 ; AVX512-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
15525 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15526 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
15527 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
15528 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15529 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
15530 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
15531 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15532 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
15533 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15534 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
15535 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15536 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
15537 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15538 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
15539 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15540 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
15541 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15542 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
15543 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15544 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
15545 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15546 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
15547 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15548 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
15549 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15550 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
15551 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15552 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
15553 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15554 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
15555 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15556 ; AVX512-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
15557 ; AVX512-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
15558 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15559 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
15560 ; AVX512-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
15561 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
15562 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
15563 ; AVX512-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
15564 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
15565 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
15566 ; AVX512-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
15567 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
15568 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
15569 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
15570 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
15571 ; AVX512-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
15572 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
15573 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
15574 ; AVX512-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
15575 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
15576 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
15577 ; AVX512-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
15578 ; AVX512-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
15579 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
15580 ; AVX512-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
15581 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
15582 ; AVX512-NEXT: vmovdqa64 %zmm8, 448(%rsi)
15583 ; AVX512-NEXT: vmovdqa64 %zmm29, 384(%rsi)
15584 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15585 ; AVX512-NEXT: vmovaps %zmm8, 320(%rsi)
15586 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15587 ; AVX512-NEXT: vmovaps %zmm8, 256(%rsi)
15588 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15589 ; AVX512-NEXT: vmovaps %zmm8, 192(%rsi)
15590 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15591 ; AVX512-NEXT: vmovaps %zmm8, 128(%rsi)
15592 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15593 ; AVX512-NEXT: vmovaps %zmm8, 64(%rsi)
15594 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15595 ; AVX512-NEXT: vmovaps %zmm8, (%rsi)
15596 ; AVX512-NEXT: vmovdqa64 %zmm9, 448(%rdx)
15597 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15598 ; AVX512-NEXT: vmovaps %zmm8, 256(%rdx)
15599 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15600 ; AVX512-NEXT: vmovaps %zmm8, 320(%rdx)
15601 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15602 ; AVX512-NEXT: vmovaps %zmm8, 128(%rdx)
15603 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15604 ; AVX512-NEXT: vmovaps %zmm8, 192(%rdx)
15605 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15606 ; AVX512-NEXT: vmovaps %zmm8, (%rdx)
15607 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15608 ; AVX512-NEXT: vmovaps %zmm8, 64(%rdx)
15609 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15610 ; AVX512-NEXT: vmovaps %zmm8, 384(%rdx)
15611 ; AVX512-NEXT: vmovdqa64 %zmm10, 448(%rcx)
15612 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15613 ; AVX512-NEXT: vmovaps %zmm8, 256(%rcx)
15614 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15615 ; AVX512-NEXT: vmovaps %zmm8, 320(%rcx)
15616 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15617 ; AVX512-NEXT: vmovaps %zmm8, 128(%rcx)
15618 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15619 ; AVX512-NEXT: vmovaps %zmm8, 192(%rcx)
15620 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15621 ; AVX512-NEXT: vmovaps %zmm8, (%rcx)
15622 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15623 ; AVX512-NEXT: vmovaps %zmm8, 64(%rcx)
15624 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15625 ; AVX512-NEXT: vmovaps %zmm8, 384(%rcx)
15626 ; AVX512-NEXT: vmovdqa64 %zmm11, 448(%r8)
15627 ; AVX512-NEXT: vmovdqa64 %zmm27, 256(%r8)
15628 ; AVX512-NEXT: vmovdqa64 %zmm31, 320(%r8)
15629 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15630 ; AVX512-NEXT: vmovaps %zmm8, 128(%r8)
15631 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15632 ; AVX512-NEXT: vmovaps %zmm8, 192(%r8)
15633 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15634 ; AVX512-NEXT: vmovaps %zmm8, (%r8)
15635 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
15636 ; AVX512-NEXT: vmovaps %zmm8, 64(%r8)
15637 ; AVX512-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
15638 ; AVX512-NEXT: vmovaps %zmm8, 384(%r8)
15639 ; AVX512-NEXT: vmovdqa64 %zmm4, 448(%r9)
15640 ; AVX512-NEXT: vmovdqa64 %zmm15, 256(%r9)
15641 ; AVX512-NEXT: vmovdqa64 %zmm13, 320(%r9)
15642 ; AVX512-NEXT: vmovdqa64 %zmm7, 128(%r9)
15643 ; AVX512-NEXT: vmovdqa64 %zmm6, 192(%r9)
15644 ; AVX512-NEXT: vmovdqa64 %zmm5, (%r9)
15645 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%r9)
15646 ; AVX512-NEXT: vmovdqa64 %zmm0, 384(%r9)
15647 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
15648 ; AVX512-NEXT: vmovdqa64 %zmm14, 384(%rax)
15649 ; AVX512-NEXT: vmovdqa64 %zmm12, 448(%rax)
15650 ; AVX512-NEXT: vmovdqa64 %zmm19, 256(%rax)
15651 ; AVX512-NEXT: vmovdqa64 %zmm18, 320(%rax)
15652 ; AVX512-NEXT: vmovdqa64 %zmm17, 128(%rax)
15653 ; AVX512-NEXT: vmovdqa64 %zmm16, 192(%rax)
15654 ; AVX512-NEXT: vmovdqa64 %zmm3, (%rax)
15655 ; AVX512-NEXT: vmovdqa64 %zmm2, 64(%rax)
15656 ; AVX512-NEXT: addq $7240, %rsp # imm = 0x1C48
15657 ; AVX512-NEXT: vzeroupper
15658 ; AVX512-NEXT: retq
15660 ; AVX512-FCP-LABEL: load_i64_stride6_vf64:
15661 ; AVX512-FCP: # %bb.0:
15662 ; AVX512-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
15663 ; AVX512-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
15664 ; AVX512-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
15665 ; AVX512-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
15666 ; AVX512-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
15667 ; AVX512-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
15668 ; AVX512-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
15669 ; AVX512-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
15670 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
15671 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
15672 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
15673 ; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
15674 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
15675 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
15676 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15677 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
15678 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
15679 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15680 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
15681 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
15682 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15683 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
15684 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
15685 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15686 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
15687 ; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
15688 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
15689 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
15690 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15691 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
15692 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
15693 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15694 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
15695 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
15696 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15697 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
15698 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
15699 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15700 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
15701 ; AVX512-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15702 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
15703 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
15704 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15705 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
15706 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
15707 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15708 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
15709 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
15710 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15711 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
15712 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
15713 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15714 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
15715 ; AVX512-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15716 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
15717 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
15718 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15719 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
15720 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
15721 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15722 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
15723 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
15724 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15725 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
15726 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
15727 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15728 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
15729 ; AVX512-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
15730 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
15731 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
15732 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
15733 ; AVX512-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15734 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
15735 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
15736 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
15737 ; AVX512-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15738 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
15739 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
15740 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15741 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
15742 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15743 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
15744 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
15745 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15746 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
15747 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15748 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
15749 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
15750 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15751 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
15752 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15753 ; AVX512-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
15754 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15755 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15756 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15757 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15758 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
15759 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15760 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15761 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
15762 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15763 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15764 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
15765 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15766 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
15767 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
15768 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15769 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15770 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15771 ; AVX512-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
15772 ; AVX512-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
15773 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
15774 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
15775 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15776 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
15777 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15778 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15779 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15780 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
15781 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15782 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15783 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
15784 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15785 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15786 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
15787 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15788 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
15789 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15790 ; AVX512-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
15791 ; AVX512-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
15792 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
15793 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
15794 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15795 ; AVX512-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
15796 ; AVX512-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
15797 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
15798 ; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15799 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
15800 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
15801 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15802 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
15803 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15804 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15805 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
15806 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15807 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
15808 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15809 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15810 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
15811 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15812 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
15813 ; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15814 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
15815 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
15816 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15817 ; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
15818 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15819 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
15820 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15821 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
15822 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15823 ; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
15824 ; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
15825 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
15826 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15827 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
15828 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15829 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
15830 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15831 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
15832 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15833 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
15834 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
15835 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
15836 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15837 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
15838 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
15839 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
15840 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15841 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
15842 ; AVX512-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15843 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
15844 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
15845 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15846 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
15847 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15848 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15849 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15850 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
15851 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
15852 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15853 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15854 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15855 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15856 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15857 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15858 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15859 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15860 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15861 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15862 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15863 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15864 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15865 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15866 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15867 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15868 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15869 ; AVX512-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
15870 ; AVX512-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
15871 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15872 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15873 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15874 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15875 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15876 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15877 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15878 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15879 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15880 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15881 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15882 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15883 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15884 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15885 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15886 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15887 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15888 ; AVX512-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
15889 ; AVX512-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
15890 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15891 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15892 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15893 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15894 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15895 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15896 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15897 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15898 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15899 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15900 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15901 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15902 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15903 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15904 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15905 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15906 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15907 ; AVX512-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
15908 ; AVX512-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
15909 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15910 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
15911 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15912 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15913 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15914 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15915 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15916 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
15917 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15918 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15919 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
15920 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15921 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
15922 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
15923 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15924 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
15925 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15926 ; AVX512-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
15927 ; AVX512-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
15928 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15929 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
15930 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15931 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15932 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
15933 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15934 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
15935 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
15936 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15937 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
15938 ; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
15939 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15940 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
15941 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
15942 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15943 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
15944 ; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
15945 ; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
15946 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
15947 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
15948 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15949 ; AVX512-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
15950 ; AVX512-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
15951 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
15952 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
15953 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15954 ; AVX512-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
15955 ; AVX512-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
15956 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
15957 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
15958 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15959 ; AVX512-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
15960 ; AVX512-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
15961 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
15962 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
15963 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15964 ; AVX512-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
15965 ; AVX512-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
15966 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
15967 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
15968 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15969 ; AVX512-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
15970 ; AVX512-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
15971 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
15972 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
15973 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15974 ; AVX512-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
15975 ; AVX512-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
15976 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
15977 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
15978 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15979 ; AVX512-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
15980 ; AVX512-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
15981 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
15982 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
15983 ; AVX512-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
15984 ; AVX512-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
15985 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
15986 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
15987 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15988 ; AVX512-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
15989 ; AVX512-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
15990 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
15991 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
15992 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
15993 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15994 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
15995 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
15996 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
15997 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15998 ; AVX512-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
15999 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
16000 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
16001 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
16002 ; AVX512-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
16003 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
16004 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16005 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16006 ; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
16007 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
16008 ; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
16009 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
16010 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
16011 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16012 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
16013 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
16014 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16015 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
16016 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
16017 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16018 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
16019 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
16020 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16021 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
16022 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
16023 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16024 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
16025 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
16026 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16027 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
16028 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
16029 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16030 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
16031 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
16032 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16033 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
16034 ; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
16035 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
16036 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
16037 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16038 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
16039 ; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16040 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
16041 ; AVX512-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
16042 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16043 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
16044 ; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
16045 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
16046 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
16047 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16048 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
16049 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16050 ; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
16051 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
16052 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
16053 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16054 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
16055 ; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
16056 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16057 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
16058 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
16059 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16060 ; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
16061 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
16062 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
16063 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
16064 ; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
16065 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16066 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
16067 ; AVX512-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
16068 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16069 ; AVX512-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
16070 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
16071 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
16072 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
16073 ; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
16074 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16075 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
16076 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
16077 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16078 ; AVX512-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
16079 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
16080 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
16081 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
16082 ; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
16083 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
16084 ; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
16085 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16086 ; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
16087 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
16088 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
16089 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
16090 ; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
16091 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
16092 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
16093 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16094 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
16095 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
16096 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
16097 ; AVX512-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
16098 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
16099 ; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
16100 ; AVX512-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
16101 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
16102 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
16103 ; AVX512-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
16104 ; AVX512-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
16105 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
16106 ; AVX512-FCP-NEXT: movb $56, %al
16107 ; AVX512-FCP-NEXT: kmovw %eax, %k1
16108 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16109 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16110 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16111 ; AVX512-FCP-NEXT: movb $-64, %al
16112 ; AVX512-FCP-NEXT: kmovw %eax, %k2
16113 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16114 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16115 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16116 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16117 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16118 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16119 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16120 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16121 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16122 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16123 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16124 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16125 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16126 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16127 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16128 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16129 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16130 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16131 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16132 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16133 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16134 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16135 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16136 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16137 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16138 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16139 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16140 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16141 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16142 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16143 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16144 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16145 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16146 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16147 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
16148 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16149 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
16150 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16151 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
16152 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16153 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
16154 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16155 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16156 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16157 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16158 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16159 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16160 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16161 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16162 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16163 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16164 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16165 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16166 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16167 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16168 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16169 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16170 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16171 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16172 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16173 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16174 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16175 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16176 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16177 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16178 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16179 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16180 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16181 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16182 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16183 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16184 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16185 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16186 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16187 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16188 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16189 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16190 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16191 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16192 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16193 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16194 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16195 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16196 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16197 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
16198 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16199 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
16200 ; AVX512-FCP-NEXT: movb $24, %al
16201 ; AVX512-FCP-NEXT: kmovw %eax, %k2
16202 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16203 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16204 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16205 ; AVX512-FCP-NEXT: movb $-32, %al
16206 ; AVX512-FCP-NEXT: kmovw %eax, %k1
16207 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
16208 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16209 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16210 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16211 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16212 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16213 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16214 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16215 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16216 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16217 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16218 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16219 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16220 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16221 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16222 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16223 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16224 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
16225 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16226 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16227 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16228 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16229 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
16230 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16231 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16232 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16233 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16234 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
16235 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16236 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16237 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16238 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16239 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
16240 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16241 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16242 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
16243 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
16244 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16245 ; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
16246 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16247 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
16248 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
16249 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16250 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16251 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16252 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16253 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16254 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16255 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16256 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16257 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16258 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16259 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16260 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16261 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16262 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16263 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16264 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16265 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16266 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16267 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16268 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16269 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16270 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16271 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16272 ; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16273 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16274 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
16275 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
16276 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
16277 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16278 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
16279 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
16280 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
16281 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16282 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
16283 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
16284 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16285 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
16286 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
16287 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16288 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
16289 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16290 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
16291 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16292 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
16293 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16294 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
16295 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16296 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
16297 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16298 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
16299 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16300 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
16301 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16302 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
16303 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16304 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
16305 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16306 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
16307 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16308 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
16309 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16310 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
16311 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16312 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
16313 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
16314 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16315 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
16316 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
16317 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16318 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
16319 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
16320 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16321 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
16322 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
16323 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
16324 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
16325 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
16326 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
16327 ; AVX512-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
16328 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
16329 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
16330 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
16331 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
16332 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
16333 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
16334 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
16335 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
16336 ; AVX512-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
16337 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
16338 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
16339 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
16340 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16341 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
16342 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16343 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
16344 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16345 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
16346 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16347 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
16348 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16349 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
16350 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16351 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rsi)
16352 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
16353 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16354 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
16355 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16356 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
16357 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16358 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
16359 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16360 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
16361 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16362 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rdx)
16363 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16364 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
16365 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16366 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
16367 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
16368 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16369 ; AVX512-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
16370 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16371 ; AVX512-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
16372 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16373 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
16374 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16375 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
16376 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16377 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%rcx)
16378 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16379 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
16380 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16381 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
16382 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
16383 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
16384 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
16385 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16386 ; AVX512-FCP-NEXT: vmovaps %zmm8, 128(%r8)
16387 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16388 ; AVX512-FCP-NEXT: vmovaps %zmm8, 192(%r8)
16389 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16390 ; AVX512-FCP-NEXT: vmovaps %zmm8, (%r8)
16391 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16392 ; AVX512-FCP-NEXT: vmovaps %zmm8, 64(%r8)
16393 ; AVX512-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
16394 ; AVX512-FCP-NEXT: vmovaps %zmm8, 384(%r8)
16395 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
16396 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
16397 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
16398 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
16399 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
16400 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
16401 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
16402 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
16403 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
16404 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
16405 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
16406 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
16407 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
16408 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
16409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
16410 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
16411 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
16412 ; AVX512-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
16413 ; AVX512-FCP-NEXT: vzeroupper
16414 ; AVX512-FCP-NEXT: retq
16416 ; AVX512DQ-LABEL: load_i64_stride6_vf64:
16417 ; AVX512DQ: # %bb.0:
16418 ; AVX512DQ-NEXT: subq $7240, %rsp # imm = 0x1C48
16419 ; AVX512DQ-NEXT: vmovdqa64 2048(%rdi), %zmm3
16420 ; AVX512DQ-NEXT: vmovdqa64 1280(%rdi), %zmm4
16421 ; AVX512DQ-NEXT: vmovdqa64 1344(%rdi), %zmm0
16422 ; AVX512DQ-NEXT: vmovdqa64 896(%rdi), %zmm5
16423 ; AVX512DQ-NEXT: vmovdqa64 960(%rdi), %zmm26
16424 ; AVX512DQ-NEXT: vmovdqa64 512(%rdi), %zmm2
16425 ; AVX512DQ-NEXT: vmovdqa64 576(%rdi), %zmm1
16426 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm6
16427 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm29
16428 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
16429 ; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
16430 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, %zmm8
16431 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
16432 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16433 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm8
16434 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
16435 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16436 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm8
16437 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
16438 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16439 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
16440 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
16441 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16442 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
16443 ; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
16444 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm9
16445 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
16446 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16447 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, %zmm9
16448 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
16449 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16450 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9
16451 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
16452 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16453 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm9
16454 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
16455 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16456 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
16457 ; AVX512DQ-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16458 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm10
16459 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
16460 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16461 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm10
16462 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
16463 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16464 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm10
16465 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
16466 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16467 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm10
16468 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
16469 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16470 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
16471 ; AVX512DQ-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16472 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm11
16473 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
16474 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16475 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm11
16476 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
16477 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16478 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm11
16479 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
16480 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16481 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm11
16482 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
16483 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16484 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
16485 ; AVX512DQ-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
16486 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm13
16487 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm12
16488 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
16489 ; AVX512DQ-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16490 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
16491 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16492 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
16493 ; AVX512DQ-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16494 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm1
16495 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
16496 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16497 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
16498 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16499 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm1
16500 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
16501 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16502 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
16503 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16504 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
16505 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
16506 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16507 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
16508 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16509 ; AVX512DQ-NEXT: vmovdqa64 2112(%rdi), %zmm0
16510 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16511 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
16512 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16513 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16514 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
16515 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16516 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16517 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
16518 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16519 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16520 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
16521 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16522 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1
16523 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
16524 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16525 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
16526 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16527 ; AVX512DQ-NEXT: vmovdqa64 1664(%rdi), %zmm1
16528 ; AVX512DQ-NEXT: vmovdqa64 1728(%rdi), %zmm0
16529 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm3
16530 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
16531 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16532 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm3
16533 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16534 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16535 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16536 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
16537 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16538 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16539 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
16540 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16541 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16542 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
16543 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16544 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
16545 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16546 ; AVX512DQ-NEXT: vmovdqa64 2432(%rdi), %zmm3
16547 ; AVX512DQ-NEXT: vmovdqa64 2496(%rdi), %zmm0
16548 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1
16549 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
16550 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16551 ; AVX512DQ-NEXT: vmovdqa64 2816(%rdi), %zmm4
16552 ; AVX512DQ-NEXT: vmovdqa64 2880(%rdi), %zmm1
16553 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
16554 ; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16555 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5
16556 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
16557 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16558 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
16559 ; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16560 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16561 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
16562 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16563 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
16564 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16565 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16566 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
16567 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16568 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
16569 ; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16570 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm5
16571 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
16572 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16573 ; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
16574 ; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16575 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
16576 ; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16577 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
16578 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16579 ; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm1
16580 ; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm2
16581 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
16582 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16583 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
16584 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16585 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
16586 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16587 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
16588 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16589 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
16590 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
16591 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
16592 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16593 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
16594 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
16595 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
16596 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16597 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
16598 ; AVX512DQ-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16599 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm0
16600 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
16601 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16602 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
16603 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16604 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16605 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16606 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm2
16607 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
16608 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16609 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16610 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16611 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16612 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16613 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16614 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16615 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16616 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16617 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16618 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16619 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16620 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16621 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16622 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16623 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16624 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16625 ; AVX512DQ-NEXT: vmovdqa64 1216(%rdi), %zmm1
16626 ; AVX512DQ-NEXT: vmovdqa64 1152(%rdi), %zmm2
16627 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16628 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16629 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16630 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16631 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16632 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16633 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16634 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16635 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16636 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16637 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16638 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16639 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16640 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16641 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16642 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16643 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16644 ; AVX512DQ-NEXT: vmovdqa64 832(%rdi), %zmm1
16645 ; AVX512DQ-NEXT: vmovdqa64 768(%rdi), %zmm2
16646 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16647 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16648 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16649 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16650 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16651 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16652 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16653 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16654 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16655 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16656 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16657 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16658 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16659 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16660 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16661 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16662 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16663 ; AVX512DQ-NEXT: vmovdqa64 1984(%rdi), %zmm1
16664 ; AVX512DQ-NEXT: vmovdqa64 1920(%rdi), %zmm2
16665 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16666 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
16667 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16668 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16669 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16670 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16671 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16672 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
16673 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16674 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16675 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
16676 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16677 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm3
16678 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16679 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16680 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16681 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16682 ; AVX512DQ-NEXT: vmovdqa64 1600(%rdi), %zmm1
16683 ; AVX512DQ-NEXT: vmovdqa64 1536(%rdi), %zmm30
16684 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16685 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
16686 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16687 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16688 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
16689 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16690 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm2
16691 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
16692 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16693 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm2
16694 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
16695 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16696 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm2
16697 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
16698 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16699 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
16700 ; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm26
16701 ; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm18
16702 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm1
16703 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
16704 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16705 ; AVX512DQ-NEXT: vmovdqa64 704(%rdi), %zmm21
16706 ; AVX512DQ-NEXT: vmovdqa64 640(%rdi), %zmm13
16707 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm1
16708 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
16709 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16710 ; AVX512DQ-NEXT: vmovdqa64 1088(%rdi), %zmm24
16711 ; AVX512DQ-NEXT: vmovdqa64 1024(%rdi), %zmm19
16712 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm1
16713 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
16714 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16715 ; AVX512DQ-NEXT: vmovdqa64 1472(%rdi), %zmm28
16716 ; AVX512DQ-NEXT: vmovdqa64 1408(%rdi), %zmm16
16717 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm1
16718 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
16719 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16720 ; AVX512DQ-NEXT: vmovdqa64 1856(%rdi), %zmm25
16721 ; AVX512DQ-NEXT: vmovdqa64 1792(%rdi), %zmm7
16722 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1
16723 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
16724 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16725 ; AVX512DQ-NEXT: vmovdqa64 2240(%rdi), %zmm23
16726 ; AVX512DQ-NEXT: vmovdqa64 2176(%rdi), %zmm17
16727 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm1
16728 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
16729 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16730 ; AVX512DQ-NEXT: vmovdqa64 2624(%rdi), %zmm20
16731 ; AVX512DQ-NEXT: vmovdqa64 2560(%rdi), %zmm5
16732 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
16733 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
16734 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16735 ; AVX512DQ-NEXT: vmovdqa64 2368(%rdi), %zmm1
16736 ; AVX512DQ-NEXT: vmovdqa64 2304(%rdi), %zmm22
16737 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm29
16738 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
16739 ; AVX512DQ-NEXT: vmovdqa64 3008(%rdi), %zmm14
16740 ; AVX512DQ-NEXT: vmovdqa64 2944(%rdi), %zmm31
16741 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, %zmm2
16742 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
16743 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16744 ; AVX512DQ-NEXT: vmovdqa64 2752(%rdi), %zmm2
16745 ; AVX512DQ-NEXT: vmovdqa64 2688(%rdi), %zmm12
16746 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
16747 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm3
16748 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
16749 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16750 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
16751 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16752 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
16753 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16754 ; AVX512DQ-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
16755 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm3
16756 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
16757 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
16758 ; AVX512DQ-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
16759 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm3
16760 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
16761 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16762 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
16763 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
16764 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
16765 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
16766 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
16767 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16768 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm1
16769 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
16770 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16771 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm1
16772 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
16773 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16774 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm1
16775 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
16776 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16777 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm1
16778 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
16779 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16780 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm1
16781 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
16782 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16783 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1
16784 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
16785 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16786 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, %zmm1
16787 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
16788 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16789 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
16790 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
16791 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm0
16792 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
16793 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16794 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
16795 ; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16796 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm0
16797 ; AVX512DQ-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
16798 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16799 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
16800 ; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
16801 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm0
16802 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
16803 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16804 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
16805 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16806 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
16807 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm6
16808 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
16809 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16810 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm6
16811 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
16812 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16813 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm6
16814 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
16815 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16816 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
16817 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm27
16818 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
16819 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm6
16820 ; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
16821 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16822 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm6
16823 ; AVX512DQ-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
16824 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16825 ; AVX512DQ-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
16826 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm18
16827 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
16828 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm6
16829 ; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
16830 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16831 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm6
16832 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
16833 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16834 ; AVX512DQ-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
16835 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm15
16836 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
16837 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm19
16838 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
16839 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm6
16840 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
16841 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16842 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
16843 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm13
16844 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
16845 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm17
16846 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
16847 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm6
16848 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
16849 ; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16850 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
16851 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm6
16852 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
16853 ; AVX512DQ-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
16854 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm7
16855 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
16856 ; AVX512DQ-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
16857 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm16
16858 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
16859 ; AVX512DQ-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
16860 ; AVX512DQ-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
16861 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
16862 ; AVX512DQ-NEXT: movb $56, %al
16863 ; AVX512DQ-NEXT: kmovw %eax, %k1
16864 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16865 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16866 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16867 ; AVX512DQ-NEXT: movb $-64, %al
16868 ; AVX512DQ-NEXT: kmovw %eax, %k2
16869 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16870 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16871 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16872 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16873 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16874 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16875 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16876 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16877 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16878 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16879 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16880 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16881 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16882 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16883 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16884 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16885 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16886 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16887 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16888 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16889 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16890 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16891 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16892 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16893 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16894 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16895 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16896 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16897 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16898 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16899 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16900 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16901 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16902 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16903 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
16904 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16905 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
16906 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16907 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
16908 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16909 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
16910 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16911 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16912 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16913 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16914 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16915 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16916 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16917 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16918 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16919 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16920 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16921 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16922 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16923 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16924 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16925 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16926 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16927 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16928 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16929 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16930 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16931 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16932 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16933 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16934 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16935 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16936 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16937 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16938 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16939 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16940 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16941 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16942 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16943 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16944 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16945 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16946 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16947 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16948 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16949 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16950 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16951 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16952 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16953 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
16954 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16955 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
16956 ; AVX512DQ-NEXT: movb $24, %al
16957 ; AVX512DQ-NEXT: kmovw %eax, %k2
16958 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16959 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16960 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16961 ; AVX512DQ-NEXT: movb $-32, %al
16962 ; AVX512DQ-NEXT: kmovw %eax, %k1
16963 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
16964 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16965 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16966 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16967 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16968 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16969 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16970 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16971 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16972 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16973 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16974 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16975 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16976 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16977 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16978 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16979 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16980 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
16981 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16982 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16983 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16984 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16985 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
16986 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16987 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16988 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16989 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16990 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
16991 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16992 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16993 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16994 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16995 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
16996 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16997 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16998 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
16999 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
17000 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17001 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
17002 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17003 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
17004 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
17005 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17006 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17007 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17008 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17009 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17010 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17011 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17012 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17013 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17014 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17015 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17016 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17017 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17018 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17019 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17020 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17021 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17022 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17023 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17024 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17025 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17026 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17027 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17028 ; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17029 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17030 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
17031 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
17032 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
17033 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17034 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
17035 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
17036 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
17037 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17038 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
17039 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
17040 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17041 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
17042 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
17043 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17044 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
17045 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17046 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
17047 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17048 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
17049 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17050 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
17051 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17052 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
17053 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17054 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
17055 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17056 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
17057 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17058 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
17059 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17060 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
17061 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17062 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
17063 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17064 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
17065 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17066 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
17067 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17068 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
17069 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
17070 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17071 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
17072 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
17073 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17074 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
17075 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
17076 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17077 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
17078 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
17079 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17080 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
17081 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
17082 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
17083 ; AVX512DQ-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
17084 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
17085 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
17086 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
17087 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
17088 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
17089 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
17090 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
17091 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
17092 ; AVX512DQ-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
17093 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
17094 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 448(%rsi)
17095 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, 384(%rsi)
17096 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17097 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rsi)
17098 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17099 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rsi)
17100 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17101 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rsi)
17102 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17103 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rsi)
17104 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17105 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rsi)
17106 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17107 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rsi)
17108 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 448(%rdx)
17109 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17110 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rdx)
17111 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17112 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rdx)
17113 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17114 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rdx)
17115 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17116 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rdx)
17117 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17118 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rdx)
17119 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17120 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rdx)
17121 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17122 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%rdx)
17123 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 448(%rcx)
17124 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17125 ; AVX512DQ-NEXT: vmovaps %zmm8, 256(%rcx)
17126 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17127 ; AVX512DQ-NEXT: vmovaps %zmm8, 320(%rcx)
17128 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17129 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%rcx)
17130 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17131 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%rcx)
17132 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17133 ; AVX512DQ-NEXT: vmovaps %zmm8, (%rcx)
17134 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17135 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%rcx)
17136 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17137 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%rcx)
17138 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 448(%r8)
17139 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 256(%r8)
17140 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 320(%r8)
17141 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17142 ; AVX512DQ-NEXT: vmovaps %zmm8, 128(%r8)
17143 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17144 ; AVX512DQ-NEXT: vmovaps %zmm8, 192(%r8)
17145 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17146 ; AVX512DQ-NEXT: vmovaps %zmm8, (%r8)
17147 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17148 ; AVX512DQ-NEXT: vmovaps %zmm8, 64(%r8)
17149 ; AVX512DQ-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
17150 ; AVX512DQ-NEXT: vmovaps %zmm8, 384(%r8)
17151 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 448(%r9)
17152 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 256(%r9)
17153 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 320(%r9)
17154 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 128(%r9)
17155 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 192(%r9)
17156 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%r9)
17157 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%r9)
17158 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 384(%r9)
17159 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
17160 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 384(%rax)
17161 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 448(%rax)
17162 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 256(%rax)
17163 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 320(%rax)
17164 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 128(%rax)
17165 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 192(%rax)
17166 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, (%rax)
17167 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 64(%rax)
17168 ; AVX512DQ-NEXT: addq $7240, %rsp # imm = 0x1C48
17169 ; AVX512DQ-NEXT: vzeroupper
17170 ; AVX512DQ-NEXT: retq
17172 ; AVX512DQ-FCP-LABEL: load_i64_stride6_vf64:
17173 ; AVX512DQ-FCP: # %bb.0:
17174 ; AVX512DQ-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
17175 ; AVX512DQ-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
17176 ; AVX512DQ-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
17177 ; AVX512DQ-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
17178 ; AVX512DQ-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
17179 ; AVX512DQ-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
17180 ; AVX512DQ-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
17181 ; AVX512DQ-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
17182 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
17183 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
17184 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
17185 ; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
17186 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
17187 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
17188 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17189 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
17190 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
17191 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17192 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
17193 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
17194 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17195 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
17196 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
17197 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17198 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
17199 ; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
17200 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
17201 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
17202 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17203 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
17204 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
17205 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17206 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
17207 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
17208 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17209 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
17210 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
17211 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17212 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
17213 ; AVX512DQ-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17214 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
17215 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
17216 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17217 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
17218 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
17219 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17220 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
17221 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
17222 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17223 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
17224 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
17225 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17226 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
17227 ; AVX512DQ-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17228 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
17229 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
17230 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17231 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
17232 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
17233 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17234 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
17235 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
17236 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17237 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
17238 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
17239 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17240 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
17241 ; AVX512DQ-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
17242 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
17243 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
17244 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
17245 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17246 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
17247 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17248 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
17249 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17250 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
17251 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
17252 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17253 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
17254 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17255 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
17256 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17257 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17258 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
17259 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17260 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
17261 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
17262 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17263 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
17264 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17265 ; AVX512DQ-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
17266 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17267 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
17268 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17269 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17270 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
17271 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17272 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17273 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
17274 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17275 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17276 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
17277 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17278 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
17279 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
17280 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17281 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
17282 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17283 ; AVX512DQ-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
17284 ; AVX512DQ-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
17285 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
17286 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
17287 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17288 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
17289 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17290 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17291 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17292 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
17293 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17294 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17295 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
17296 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17297 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17298 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
17299 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17300 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
17301 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17302 ; AVX512DQ-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
17303 ; AVX512DQ-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
17304 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
17305 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
17306 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17307 ; AVX512DQ-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
17308 ; AVX512DQ-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
17309 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
17310 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17311 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
17312 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
17313 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17314 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
17315 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17316 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17317 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
17318 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17319 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
17320 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17321 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17322 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
17323 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17324 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
17325 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17326 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
17327 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
17328 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17329 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
17330 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17331 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
17332 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17333 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
17334 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17335 ; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
17336 ; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
17337 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
17338 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17339 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
17340 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17341 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
17342 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17343 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
17344 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17345 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
17346 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
17347 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
17348 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17349 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
17350 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
17351 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
17352 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17353 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
17354 ; AVX512DQ-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17355 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
17356 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
17357 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17358 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
17359 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17360 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17361 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17362 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
17363 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
17364 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17365 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17366 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17367 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17368 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17369 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17370 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17371 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17372 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17373 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17374 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17375 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17376 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17377 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17378 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17379 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17380 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17381 ; AVX512DQ-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
17382 ; AVX512DQ-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
17383 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17384 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17385 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17386 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17387 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17388 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17389 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17390 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17391 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17392 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17393 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17394 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17395 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17396 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17397 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17398 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17399 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17400 ; AVX512DQ-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
17401 ; AVX512DQ-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
17402 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17403 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17404 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17405 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17406 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17407 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17408 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17409 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17410 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17411 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17412 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17413 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17414 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17415 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17416 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17417 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17418 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17419 ; AVX512DQ-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
17420 ; AVX512DQ-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
17421 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17422 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
17423 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17424 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17425 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17426 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17427 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17428 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
17429 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17430 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17431 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
17432 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17433 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
17434 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17435 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17436 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
17437 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17438 ; AVX512DQ-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
17439 ; AVX512DQ-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
17440 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17441 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
17442 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17443 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17444 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
17445 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17446 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
17447 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
17448 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17449 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
17450 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
17451 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17452 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
17453 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
17454 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17455 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
17456 ; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
17457 ; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
17458 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
17459 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
17460 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17461 ; AVX512DQ-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
17462 ; AVX512DQ-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
17463 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
17464 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
17465 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17466 ; AVX512DQ-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
17467 ; AVX512DQ-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
17468 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
17469 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
17470 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17471 ; AVX512DQ-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
17472 ; AVX512DQ-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
17473 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
17474 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
17475 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17476 ; AVX512DQ-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
17477 ; AVX512DQ-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
17478 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
17479 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
17480 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17481 ; AVX512DQ-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
17482 ; AVX512DQ-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
17483 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
17484 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
17485 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17486 ; AVX512DQ-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
17487 ; AVX512DQ-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
17488 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
17489 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
17490 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17491 ; AVX512DQ-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
17492 ; AVX512DQ-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
17493 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
17494 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
17495 ; AVX512DQ-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
17496 ; AVX512DQ-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
17497 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
17498 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
17499 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17500 ; AVX512DQ-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
17501 ; AVX512DQ-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
17502 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
17503 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
17504 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
17505 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17506 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
17507 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17508 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
17509 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17510 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
17511 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
17512 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
17513 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
17514 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
17515 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
17516 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
17517 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17518 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
17519 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
17520 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
17521 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
17522 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
17523 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17524 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
17525 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
17526 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17527 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
17528 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
17529 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17530 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
17531 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
17532 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17533 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
17534 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
17535 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17536 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
17537 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
17538 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17539 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
17540 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
17541 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17542 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
17543 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
17544 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17545 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
17546 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
17547 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
17548 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
17549 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17550 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
17551 ; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17552 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
17553 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
17554 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17555 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
17556 ; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
17557 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
17558 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
17559 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17560 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
17561 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
17562 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
17563 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
17564 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
17565 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17566 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
17567 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
17568 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17569 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
17570 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
17571 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17572 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
17573 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
17574 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
17575 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
17576 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
17577 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17578 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
17579 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
17580 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17581 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
17582 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
17583 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
17584 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
17585 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
17586 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17587 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
17588 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
17589 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17590 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
17591 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
17592 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
17593 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
17594 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
17595 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
17596 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
17597 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17598 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
17599 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
17600 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
17601 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
17602 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
17603 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
17604 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
17605 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17606 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
17607 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
17608 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
17609 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
17610 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
17611 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
17612 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
17613 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
17614 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
17615 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
17616 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
17617 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
17618 ; AVX512DQ-FCP-NEXT: movb $56, %al
17619 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
17620 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17621 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17622 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17623 ; AVX512DQ-FCP-NEXT: movb $-64, %al
17624 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
17625 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17626 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17627 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17628 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17629 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17630 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17631 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17632 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17633 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17634 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17635 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17636 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17637 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17638 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17639 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17640 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17641 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17642 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17643 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17644 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17645 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17646 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17647 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17648 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17649 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17650 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17651 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17652 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17653 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17654 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17655 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17656 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17657 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17658 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17659 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
17660 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17661 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
17662 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17663 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
17664 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17665 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
17666 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17667 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17668 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17669 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17670 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17671 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17672 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17673 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17674 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17675 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17676 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17677 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17678 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17679 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17680 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17681 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17682 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17683 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17684 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17685 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17686 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17687 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17688 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17689 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17690 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17691 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17692 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17693 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17694 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17695 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17696 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17697 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17698 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17699 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17700 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17701 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17702 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17703 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17704 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17705 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17706 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17707 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17708 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17709 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
17710 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17711 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
17712 ; AVX512DQ-FCP-NEXT: movb $24, %al
17713 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
17714 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17715 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17716 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17717 ; AVX512DQ-FCP-NEXT: movb $-32, %al
17718 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
17719 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
17720 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17721 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17722 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17723 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17724 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17725 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17726 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17727 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17728 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17729 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17730 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17731 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17732 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17733 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17734 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17735 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17736 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
17737 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17738 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17739 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17740 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17741 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
17742 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17743 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17744 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17745 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17746 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
17747 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17748 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17749 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17750 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
17751 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
17752 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17753 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17754 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
17755 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
17756 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17757 ; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
17758 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17759 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
17760 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
17761 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17762 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17763 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17764 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17765 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17766 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17767 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17768 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17769 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17770 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17771 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17772 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17773 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17774 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17775 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17776 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17777 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17778 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17779 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17780 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17781 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17782 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17783 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17784 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17785 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17786 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
17787 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
17788 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
17789 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17790 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
17791 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
17792 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
17793 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17794 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
17795 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
17796 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17797 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
17798 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
17799 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17800 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
17801 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17802 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
17803 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17804 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
17805 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17806 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
17807 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17808 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
17809 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17810 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
17811 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17812 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
17813 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17814 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
17815 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17816 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
17817 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17818 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
17819 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17820 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
17821 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17822 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
17823 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17824 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
17825 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
17826 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17827 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
17828 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
17829 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17830 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
17831 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
17832 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17833 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
17834 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
17835 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17836 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
17837 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
17838 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
17839 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
17840 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
17841 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
17842 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
17843 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
17844 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
17845 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
17846 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
17847 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
17848 ; AVX512DQ-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
17849 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
17850 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
17851 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
17852 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17853 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
17854 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17855 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
17856 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17857 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
17858 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17859 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
17860 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17861 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
17862 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17863 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rsi)
17864 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
17865 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17866 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
17867 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17868 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
17869 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17870 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
17871 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17872 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
17873 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17874 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rdx)
17875 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17876 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
17877 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17878 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
17879 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
17880 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17881 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
17882 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17883 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
17884 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17885 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
17886 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17887 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
17888 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17889 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%rcx)
17890 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17891 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
17892 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17893 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
17894 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
17895 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
17896 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
17897 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17898 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 128(%r8)
17899 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17900 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 192(%r8)
17901 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17902 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, (%r8)
17903 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17904 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 64(%r8)
17905 ; AVX512DQ-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
17906 ; AVX512DQ-FCP-NEXT: vmovaps %zmm8, 384(%r8)
17907 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
17908 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
17909 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
17910 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
17911 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
17912 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
17913 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
17914 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
17915 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
17916 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
17917 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
17918 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
17919 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
17920 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
17921 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
17922 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
17923 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
17924 ; AVX512DQ-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
17925 ; AVX512DQ-FCP-NEXT: vzeroupper
17926 ; AVX512DQ-FCP-NEXT: retq
17928 ; AVX512BW-LABEL: load_i64_stride6_vf64:
17929 ; AVX512BW: # %bb.0:
17930 ; AVX512BW-NEXT: subq $7240, %rsp # imm = 0x1C48
17931 ; AVX512BW-NEXT: vmovdqa64 2048(%rdi), %zmm3
17932 ; AVX512BW-NEXT: vmovdqa64 1280(%rdi), %zmm4
17933 ; AVX512BW-NEXT: vmovdqa64 1344(%rdi), %zmm0
17934 ; AVX512BW-NEXT: vmovdqa64 896(%rdi), %zmm5
17935 ; AVX512BW-NEXT: vmovdqa64 960(%rdi), %zmm26
17936 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm2
17937 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm1
17938 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm6
17939 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm29
17940 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
17941 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
17942 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm8
17943 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
17944 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17945 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm8
17946 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
17947 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17948 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm8
17949 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
17950 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17951 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
17952 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
17953 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17954 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
17955 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
17956 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9
17957 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
17958 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17959 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm9
17960 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
17961 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17962 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9
17963 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
17964 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17965 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm9
17966 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
17967 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17968 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
17969 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17970 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm10
17971 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
17972 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17973 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm10
17974 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
17975 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17976 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm10
17977 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
17978 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17979 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm10
17980 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
17981 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17982 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
17983 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17984 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm11
17985 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
17986 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17987 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
17988 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
17989 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17990 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm11
17991 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
17992 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17993 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm11
17994 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
17995 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17996 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
17997 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
17998 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm13
17999 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm12
18000 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
18001 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18002 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
18003 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18004 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
18005 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18006 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm1
18007 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
18008 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18009 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
18010 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18011 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
18012 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18013 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18014 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
18015 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18016 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
18017 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
18018 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18019 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
18020 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18021 ; AVX512BW-NEXT: vmovdqa64 2112(%rdi), %zmm0
18022 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
18023 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18024 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18025 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
18026 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
18027 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18028 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
18029 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
18030 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18031 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
18032 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
18033 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18034 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
18035 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18036 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18037 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18038 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18039 ; AVX512BW-NEXT: vmovdqa64 1664(%rdi), %zmm1
18040 ; AVX512BW-NEXT: vmovdqa64 1728(%rdi), %zmm0
18041 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3
18042 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
18043 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18044 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3
18045 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18046 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18047 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18048 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
18049 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18050 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18051 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
18052 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18053 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18054 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
18055 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18056 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
18057 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18058 ; AVX512BW-NEXT: vmovdqa64 2432(%rdi), %zmm3
18059 ; AVX512BW-NEXT: vmovdqa64 2496(%rdi), %zmm0
18060 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1
18061 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18062 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18063 ; AVX512BW-NEXT: vmovdqa64 2816(%rdi), %zmm4
18064 ; AVX512BW-NEXT: vmovdqa64 2880(%rdi), %zmm1
18065 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
18066 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18067 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5
18068 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
18069 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18070 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
18071 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18072 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18073 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
18074 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18075 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
18076 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18077 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18078 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
18079 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18080 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
18081 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18082 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm5
18083 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
18084 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18085 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
18086 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18087 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
18088 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18089 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18090 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18091 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm1
18092 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm2
18093 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
18094 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18095 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
18096 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18097 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
18098 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18099 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
18100 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18101 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
18102 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
18103 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
18104 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18105 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
18106 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
18107 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
18108 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18109 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
18110 ; AVX512BW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18111 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
18112 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
18113 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18114 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
18115 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18116 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18117 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18118 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
18119 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
18120 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18121 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18122 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18123 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18124 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18125 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18126 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18127 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18128 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18129 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18130 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18131 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18132 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18133 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18134 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18135 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18136 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18137 ; AVX512BW-NEXT: vmovdqa64 1216(%rdi), %zmm1
18138 ; AVX512BW-NEXT: vmovdqa64 1152(%rdi), %zmm2
18139 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18140 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18141 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18142 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18143 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18144 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18145 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18146 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18147 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18148 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18149 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18150 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18151 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18152 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18153 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18154 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18155 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18156 ; AVX512BW-NEXT: vmovdqa64 832(%rdi), %zmm1
18157 ; AVX512BW-NEXT: vmovdqa64 768(%rdi), %zmm2
18158 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18159 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18160 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18161 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18162 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18163 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18164 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18165 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18166 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18167 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18168 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18169 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18170 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18171 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18172 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18173 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18174 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18175 ; AVX512BW-NEXT: vmovdqa64 1984(%rdi), %zmm1
18176 ; AVX512BW-NEXT: vmovdqa64 1920(%rdi), %zmm2
18177 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18178 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18179 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18180 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18181 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18182 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18183 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18184 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18185 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18186 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18187 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18188 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18189 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm3
18190 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18191 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18192 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18193 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18194 ; AVX512BW-NEXT: vmovdqa64 1600(%rdi), %zmm1
18195 ; AVX512BW-NEXT: vmovdqa64 1536(%rdi), %zmm30
18196 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18197 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
18198 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18199 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18200 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
18201 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18202 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
18203 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
18204 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18205 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
18206 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
18207 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18208 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2
18209 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
18210 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18211 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
18212 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm26
18213 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm18
18214 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm1
18215 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
18216 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18217 ; AVX512BW-NEXT: vmovdqa64 704(%rdi), %zmm21
18218 ; AVX512BW-NEXT: vmovdqa64 640(%rdi), %zmm13
18219 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
18220 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
18221 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18222 ; AVX512BW-NEXT: vmovdqa64 1088(%rdi), %zmm24
18223 ; AVX512BW-NEXT: vmovdqa64 1024(%rdi), %zmm19
18224 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm1
18225 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
18226 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18227 ; AVX512BW-NEXT: vmovdqa64 1472(%rdi), %zmm28
18228 ; AVX512BW-NEXT: vmovdqa64 1408(%rdi), %zmm16
18229 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm1
18230 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
18231 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18232 ; AVX512BW-NEXT: vmovdqa64 1856(%rdi), %zmm25
18233 ; AVX512BW-NEXT: vmovdqa64 1792(%rdi), %zmm7
18234 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1
18235 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
18236 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18237 ; AVX512BW-NEXT: vmovdqa64 2240(%rdi), %zmm23
18238 ; AVX512BW-NEXT: vmovdqa64 2176(%rdi), %zmm17
18239 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm1
18240 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
18241 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18242 ; AVX512BW-NEXT: vmovdqa64 2624(%rdi), %zmm20
18243 ; AVX512BW-NEXT: vmovdqa64 2560(%rdi), %zmm5
18244 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
18245 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
18246 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18247 ; AVX512BW-NEXT: vmovdqa64 2368(%rdi), %zmm1
18248 ; AVX512BW-NEXT: vmovdqa64 2304(%rdi), %zmm22
18249 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm29
18250 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
18251 ; AVX512BW-NEXT: vmovdqa64 3008(%rdi), %zmm14
18252 ; AVX512BW-NEXT: vmovdqa64 2944(%rdi), %zmm31
18253 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm2
18254 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
18255 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18256 ; AVX512BW-NEXT: vmovdqa64 2752(%rdi), %zmm2
18257 ; AVX512BW-NEXT: vmovdqa64 2688(%rdi), %zmm12
18258 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
18259 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm3
18260 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18261 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18262 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
18263 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18264 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
18265 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18266 ; AVX512BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
18267 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
18268 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
18269 ; AVX512BW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
18270 ; AVX512BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
18271 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm3
18272 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18273 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18274 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
18275 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
18276 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
18277 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
18278 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
18279 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18280 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
18281 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
18282 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18283 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm1
18284 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
18285 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18286 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm1
18287 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
18288 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18289 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm1
18290 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
18291 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18292 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm1
18293 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
18294 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18295 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1
18296 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
18297 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18298 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm1
18299 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
18300 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18301 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
18302 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
18303 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm0
18304 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
18305 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18306 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
18307 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18308 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm0
18309 ; AVX512BW-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
18310 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18311 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
18312 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
18313 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm0
18314 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
18315 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18316 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
18317 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
18318 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
18319 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm6
18320 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
18321 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18322 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm6
18323 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
18324 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18325 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm6
18326 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
18327 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18328 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
18329 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm27
18330 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
18331 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm6
18332 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
18333 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18334 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm6
18335 ; AVX512BW-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
18336 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18337 ; AVX512BW-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
18338 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm18
18339 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
18340 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm6
18341 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
18342 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18343 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm6
18344 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
18345 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18346 ; AVX512BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
18347 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm15
18348 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
18349 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm19
18350 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
18351 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm6
18352 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
18353 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18354 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
18355 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13
18356 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
18357 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm17
18358 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
18359 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm6
18360 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
18361 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18362 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
18363 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6
18364 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
18365 ; AVX512BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
18366 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm7
18367 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
18368 ; AVX512BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
18369 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm16
18370 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
18371 ; AVX512BW-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
18372 ; AVX512BW-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
18373 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
18374 ; AVX512BW-NEXT: movb $56, %al
18375 ; AVX512BW-NEXT: kmovd %eax, %k1
18376 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18377 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18378 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18379 ; AVX512BW-NEXT: movb $-64, %al
18380 ; AVX512BW-NEXT: kmovd %eax, %k2
18381 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18382 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18383 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18384 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18385 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18386 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18387 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18388 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18389 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18390 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18391 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18392 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18393 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18394 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18395 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18396 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18397 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18398 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18399 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18400 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18401 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18402 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18403 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18404 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18405 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18406 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18407 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18408 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18409 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18410 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18411 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18412 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18413 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18414 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18415 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
18416 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18417 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
18418 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18419 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
18420 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18421 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
18422 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18423 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18424 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18425 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18426 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18427 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18428 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18429 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18430 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18431 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18432 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18433 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18434 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18435 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18436 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18437 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18438 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18439 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18440 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18441 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18442 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18443 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18444 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18445 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18446 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18447 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18448 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18449 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18450 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18451 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18452 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18453 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18454 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18455 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18456 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18457 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18458 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18459 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18460 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18461 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18462 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18463 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18464 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18465 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
18466 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18467 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
18468 ; AVX512BW-NEXT: movb $24, %al
18469 ; AVX512BW-NEXT: kmovd %eax, %k2
18470 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18471 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18472 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18473 ; AVX512BW-NEXT: movb $-32, %al
18474 ; AVX512BW-NEXT: kmovd %eax, %k1
18475 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
18476 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18477 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18478 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18479 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18480 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18481 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18482 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18483 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18484 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18485 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18486 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18487 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18488 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18489 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18490 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18491 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18492 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
18493 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18494 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18495 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18496 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18497 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
18498 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18499 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18500 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18501 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18502 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
18503 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18504 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18505 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18506 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18507 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
18508 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18509 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18510 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
18511 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
18512 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18513 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
18514 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18515 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
18516 ; AVX512BW-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
18517 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18518 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18519 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18520 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18521 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18522 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18523 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18524 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18525 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18526 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18527 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18528 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18529 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18530 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18531 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18532 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18533 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18534 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18535 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18536 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18537 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18538 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18539 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18540 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18541 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18542 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
18543 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
18544 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
18545 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18546 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
18547 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
18548 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
18549 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18550 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
18551 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
18552 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18553 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
18554 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
18555 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18556 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
18557 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18558 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
18559 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18560 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
18561 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18562 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
18563 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18564 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
18565 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18566 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
18567 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18568 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
18569 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18570 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
18571 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18572 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
18573 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18574 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
18575 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18576 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
18577 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18578 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
18579 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18580 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
18581 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
18582 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18583 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
18584 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
18585 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18586 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
18587 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
18588 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
18589 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
18590 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
18591 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
18592 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
18593 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
18594 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
18595 ; AVX512BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
18596 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
18597 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
18598 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
18599 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
18600 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
18601 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
18602 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
18603 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18604 ; AVX512BW-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
18605 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
18606 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 448(%rsi)
18607 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 384(%rsi)
18608 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18609 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rsi)
18610 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18611 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rsi)
18612 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18613 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rsi)
18614 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18615 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rsi)
18616 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18617 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rsi)
18618 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18619 ; AVX512BW-NEXT: vmovaps %zmm8, (%rsi)
18620 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 448(%rdx)
18621 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18622 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rdx)
18623 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18624 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rdx)
18625 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18626 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rdx)
18627 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18628 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rdx)
18629 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18630 ; AVX512BW-NEXT: vmovaps %zmm8, (%rdx)
18631 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18632 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rdx)
18633 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18634 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%rdx)
18635 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 448(%rcx)
18636 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18637 ; AVX512BW-NEXT: vmovaps %zmm8, 256(%rcx)
18638 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18639 ; AVX512BW-NEXT: vmovaps %zmm8, 320(%rcx)
18640 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18641 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%rcx)
18642 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18643 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%rcx)
18644 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18645 ; AVX512BW-NEXT: vmovaps %zmm8, (%rcx)
18646 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18647 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%rcx)
18648 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18649 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%rcx)
18650 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 448(%r8)
18651 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 256(%r8)
18652 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 320(%r8)
18653 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18654 ; AVX512BW-NEXT: vmovaps %zmm8, 128(%r8)
18655 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18656 ; AVX512BW-NEXT: vmovaps %zmm8, 192(%r8)
18657 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18658 ; AVX512BW-NEXT: vmovaps %zmm8, (%r8)
18659 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18660 ; AVX512BW-NEXT: vmovaps %zmm8, 64(%r8)
18661 ; AVX512BW-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
18662 ; AVX512BW-NEXT: vmovaps %zmm8, 384(%r8)
18663 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 448(%r9)
18664 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 256(%r9)
18665 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 320(%r9)
18666 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%r9)
18667 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 192(%r9)
18668 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r9)
18669 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
18670 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 384(%r9)
18671 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
18672 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 384(%rax)
18673 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 448(%rax)
18674 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 256(%rax)
18675 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 320(%rax)
18676 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 128(%rax)
18677 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 192(%rax)
18678 ; AVX512BW-NEXT: vmovdqa64 %zmm3, (%rax)
18679 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 64(%rax)
18680 ; AVX512BW-NEXT: addq $7240, %rsp # imm = 0x1C48
18681 ; AVX512BW-NEXT: vzeroupper
18682 ; AVX512BW-NEXT: retq
18684 ; AVX512BW-FCP-LABEL: load_i64_stride6_vf64:
18685 ; AVX512BW-FCP: # %bb.0:
18686 ; AVX512BW-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
18687 ; AVX512BW-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
18688 ; AVX512BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
18689 ; AVX512BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
18690 ; AVX512BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
18691 ; AVX512BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
18692 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
18693 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
18694 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
18695 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
18696 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
18697 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
18698 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
18699 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
18700 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18701 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
18702 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
18703 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18704 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
18705 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
18706 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18707 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
18708 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
18709 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18710 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
18711 ; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
18712 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
18713 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
18714 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18715 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
18716 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
18717 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18718 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
18719 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
18720 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18721 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
18722 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
18723 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18724 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
18725 ; AVX512BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18726 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
18727 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
18728 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18729 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
18730 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
18731 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18732 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
18733 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
18734 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18735 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
18736 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
18737 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18738 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
18739 ; AVX512BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18740 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
18741 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
18742 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18743 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
18744 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
18745 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18746 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
18747 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
18748 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18749 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
18750 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
18751 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18752 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
18753 ; AVX512BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
18754 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
18755 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
18756 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
18757 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18758 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
18759 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18760 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
18761 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18762 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
18763 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
18764 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18765 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
18766 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18767 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
18768 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18769 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18770 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
18771 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18772 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
18773 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
18774 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18775 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
18776 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18777 ; AVX512BW-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
18778 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18779 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18780 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18781 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18782 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
18783 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18784 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18785 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
18786 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18787 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18788 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
18789 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18790 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
18791 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
18792 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18793 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18794 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18795 ; AVX512BW-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
18796 ; AVX512BW-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
18797 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
18798 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
18799 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18800 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
18801 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18802 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18803 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18804 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
18805 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18806 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18807 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
18808 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18809 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18810 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
18811 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18812 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
18813 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18814 ; AVX512BW-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
18815 ; AVX512BW-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
18816 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
18817 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
18818 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18819 ; AVX512BW-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
18820 ; AVX512BW-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
18821 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
18822 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18823 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
18824 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
18825 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18826 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
18827 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18828 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18829 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
18830 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18831 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
18832 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18833 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18834 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
18835 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18836 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
18837 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18838 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
18839 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
18840 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18841 ; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
18842 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18843 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
18844 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18845 ; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
18846 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18847 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
18848 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
18849 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
18850 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18851 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
18852 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18853 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
18854 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18855 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
18856 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18857 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
18858 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
18859 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
18860 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18861 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
18862 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
18863 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
18864 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18865 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
18866 ; AVX512BW-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18867 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
18868 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
18869 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18870 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
18871 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18872 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18873 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18874 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
18875 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
18876 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18877 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18878 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18879 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18880 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18881 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18882 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18883 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18884 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18885 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18886 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18887 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18888 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18889 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18890 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18891 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18892 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18893 ; AVX512BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
18894 ; AVX512BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
18895 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18896 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18897 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18898 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18899 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18900 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18901 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18902 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18903 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18904 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18905 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18906 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18907 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18908 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18909 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18910 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18911 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18912 ; AVX512BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
18913 ; AVX512BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
18914 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18915 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18916 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18917 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18918 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18919 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18920 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18921 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18922 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18923 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18924 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18925 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18926 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18927 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18928 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18929 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18930 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18931 ; AVX512BW-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
18932 ; AVX512BW-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
18933 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18934 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
18935 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18936 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18937 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
18938 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18939 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18940 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
18941 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18942 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
18943 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
18944 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18945 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
18946 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
18947 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18948 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
18949 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18950 ; AVX512BW-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
18951 ; AVX512BW-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
18952 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18953 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
18954 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18955 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18956 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
18957 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18958 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
18959 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
18960 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18961 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
18962 ; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
18963 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18964 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
18965 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
18966 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18967 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
18968 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
18969 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
18970 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
18971 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
18972 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18973 ; AVX512BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
18974 ; AVX512BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
18975 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
18976 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
18977 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18978 ; AVX512BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
18979 ; AVX512BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
18980 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
18981 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
18982 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18983 ; AVX512BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
18984 ; AVX512BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
18985 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
18986 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
18987 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18988 ; AVX512BW-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
18989 ; AVX512BW-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
18990 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
18991 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
18992 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18993 ; AVX512BW-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
18994 ; AVX512BW-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
18995 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
18996 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
18997 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18998 ; AVX512BW-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
18999 ; AVX512BW-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
19000 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
19001 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
19002 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19003 ; AVX512BW-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
19004 ; AVX512BW-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
19005 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
19006 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
19007 ; AVX512BW-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
19008 ; AVX512BW-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
19009 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
19010 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
19011 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19012 ; AVX512BW-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
19013 ; AVX512BW-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
19014 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
19015 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
19016 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19017 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19018 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
19019 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
19020 ; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
19021 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19022 ; AVX512BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
19023 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
19024 ; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
19025 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
19026 ; AVX512BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
19027 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
19028 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19029 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19030 ; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
19031 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
19032 ; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
19033 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
19034 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
19035 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19036 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
19037 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
19038 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19039 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
19040 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
19041 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19042 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
19043 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
19044 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19045 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
19046 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
19047 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19048 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
19049 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
19050 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19051 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
19052 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
19053 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19054 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
19055 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
19056 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19057 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
19058 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
19059 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
19060 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
19061 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19062 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
19063 ; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19064 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
19065 ; AVX512BW-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
19066 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19067 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
19068 ; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
19069 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
19070 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
19071 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19072 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
19073 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19074 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
19075 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
19076 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
19077 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19078 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
19079 ; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
19080 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19081 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
19082 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
19083 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19084 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
19085 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
19086 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
19087 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
19088 ; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
19089 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19090 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
19091 ; AVX512BW-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
19092 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19093 ; AVX512BW-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
19094 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
19095 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
19096 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
19097 ; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
19098 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19099 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
19100 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
19101 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19102 ; AVX512BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
19103 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
19104 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
19105 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
19106 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
19107 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
19108 ; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
19109 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19110 ; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
19111 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
19112 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
19113 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
19114 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
19115 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
19116 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
19117 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19118 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
19119 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
19120 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
19121 ; AVX512BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
19122 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
19123 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
19124 ; AVX512BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
19125 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
19126 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
19127 ; AVX512BW-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
19128 ; AVX512BW-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
19129 ; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
19130 ; AVX512BW-FCP-NEXT: movb $56, %al
19131 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
19132 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19133 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19134 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19135 ; AVX512BW-FCP-NEXT: movb $-64, %al
19136 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
19137 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19138 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19139 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19140 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19141 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19142 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19143 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19144 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19145 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19146 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19147 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19148 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19149 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19150 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19151 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19152 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19153 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19154 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19155 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19156 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19157 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19158 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19159 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19160 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19161 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19162 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19163 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19164 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19165 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19166 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19167 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19168 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19169 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19170 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19171 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
19172 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19173 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
19174 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19175 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
19176 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19177 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
19178 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19179 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19180 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19181 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19182 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19183 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19184 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19185 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19186 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19187 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19188 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19189 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19190 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19191 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19192 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19193 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19194 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19195 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19196 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19197 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19198 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19199 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19200 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19201 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19202 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19203 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19204 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19205 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19206 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19207 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19208 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19209 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19210 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19211 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19212 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19213 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19214 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19215 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19216 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19217 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19218 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19219 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19220 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19221 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
19222 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19223 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
19224 ; AVX512BW-FCP-NEXT: movb $24, %al
19225 ; AVX512BW-FCP-NEXT: kmovd %eax, %k2
19226 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19227 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19228 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19229 ; AVX512BW-FCP-NEXT: movb $-32, %al
19230 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
19231 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
19232 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19233 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19234 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19235 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19236 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19237 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19238 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19239 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19240 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19241 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19242 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19243 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19244 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19245 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19246 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19247 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19248 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
19249 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19250 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19251 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19252 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19253 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
19254 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19255 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19256 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19257 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19258 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
19259 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19260 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19261 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19262 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19263 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
19264 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19265 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19266 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
19267 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
19268 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19269 ; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
19270 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19271 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
19272 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
19273 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19274 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19275 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19276 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19277 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19278 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19279 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19280 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19281 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19282 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19283 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19284 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19285 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19286 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19287 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19288 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19289 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19290 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19291 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19292 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19293 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
19294 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19295 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
19296 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19297 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19298 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
19299 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
19300 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
19301 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19302 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
19303 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
19304 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
19305 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19306 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
19307 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
19308 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19309 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
19310 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
19311 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19312 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
19313 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19314 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
19315 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19316 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
19317 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19318 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
19319 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19320 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
19321 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19322 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
19323 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19324 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
19325 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19326 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
19327 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19328 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
19329 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19330 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
19331 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19332 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
19333 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19334 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
19335 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19336 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
19337 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
19338 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19339 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
19340 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
19341 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19342 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
19343 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
19344 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19345 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
19346 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
19347 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
19348 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
19349 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
19350 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
19351 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
19352 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
19353 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
19354 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
19355 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
19356 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
19357 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
19358 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
19359 ; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
19360 ; AVX512BW-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
19361 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
19362 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
19363 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
19364 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19365 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
19366 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19367 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
19368 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19369 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
19370 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19371 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
19372 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19373 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
19374 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19375 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rsi)
19376 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
19377 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19378 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
19379 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19380 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
19381 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19382 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
19383 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19384 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
19385 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19386 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rdx)
19387 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19388 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
19389 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19390 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
19391 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
19392 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19393 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
19394 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19395 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
19396 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19397 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
19398 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19399 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
19400 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19401 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%rcx)
19402 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19403 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
19404 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19405 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
19406 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
19407 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
19408 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
19409 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19410 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 128(%r8)
19411 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19412 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 192(%r8)
19413 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19414 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, (%r8)
19415 ; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19416 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 64(%r8)
19417 ; AVX512BW-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
19418 ; AVX512BW-FCP-NEXT: vmovaps %zmm8, 384(%r8)
19419 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
19420 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
19421 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
19422 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
19423 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
19424 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
19425 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
19426 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
19427 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
19428 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
19429 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
19430 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
19431 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
19432 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
19433 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
19434 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
19435 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
19436 ; AVX512BW-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
19437 ; AVX512BW-FCP-NEXT: vzeroupper
19438 ; AVX512BW-FCP-NEXT: retq
19440 ; AVX512DQ-BW-LABEL: load_i64_stride6_vf64:
19441 ; AVX512DQ-BW: # %bb.0:
19442 ; AVX512DQ-BW-NEXT: subq $7240, %rsp # imm = 0x1C48
19443 ; AVX512DQ-BW-NEXT: vmovdqa64 2048(%rdi), %zmm3
19444 ; AVX512DQ-BW-NEXT: vmovdqa64 1280(%rdi), %zmm4
19445 ; AVX512DQ-BW-NEXT: vmovdqa64 1344(%rdi), %zmm0
19446 ; AVX512DQ-BW-NEXT: vmovdqa64 896(%rdi), %zmm5
19447 ; AVX512DQ-BW-NEXT: vmovdqa64 960(%rdi), %zmm26
19448 ; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm2
19449 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm1
19450 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm6
19451 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm29
19452 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
19453 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
19454 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, %zmm8
19455 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
19456 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19457 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm8
19458 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
19459 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19460 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm8
19461 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
19462 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19463 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
19464 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
19465 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19466 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
19467 ; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
19468 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm9
19469 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
19470 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19471 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, %zmm9
19472 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
19473 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19474 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9
19475 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
19476 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19477 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm9
19478 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
19479 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19480 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
19481 ; AVX512DQ-BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19482 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm10
19483 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
19484 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19485 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm10
19486 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
19487 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19488 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm10
19489 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
19490 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19491 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm10
19492 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
19493 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19494 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
19495 ; AVX512DQ-BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19496 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm11
19497 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
19498 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19499 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm11
19500 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
19501 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19502 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm11
19503 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
19504 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19505 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm11
19506 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
19507 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19508 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
19509 ; AVX512DQ-BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
19510 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm13
19511 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm12
19512 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
19513 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19514 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
19515 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19516 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
19517 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19518 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm1
19519 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
19520 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19521 ; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
19522 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19523 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm1
19524 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
19525 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19526 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
19527 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19528 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
19529 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
19530 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19531 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
19532 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19533 ; AVX512DQ-BW-NEXT: vmovdqa64 2112(%rdi), %zmm0
19534 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19535 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
19536 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19537 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19538 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
19539 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19540 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19541 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
19542 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19543 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19544 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
19545 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19546 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1
19547 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
19548 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19549 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
19550 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19551 ; AVX512DQ-BW-NEXT: vmovdqa64 1664(%rdi), %zmm1
19552 ; AVX512DQ-BW-NEXT: vmovdqa64 1728(%rdi), %zmm0
19553 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm3
19554 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
19555 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19556 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm3
19557 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19558 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19559 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19560 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
19561 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19562 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19563 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
19564 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19565 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19566 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
19567 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19568 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
19569 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19570 ; AVX512DQ-BW-NEXT: vmovdqa64 2432(%rdi), %zmm3
19571 ; AVX512DQ-BW-NEXT: vmovdqa64 2496(%rdi), %zmm0
19572 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1
19573 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
19574 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19575 ; AVX512DQ-BW-NEXT: vmovdqa64 2816(%rdi), %zmm4
19576 ; AVX512DQ-BW-NEXT: vmovdqa64 2880(%rdi), %zmm1
19577 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
19578 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19579 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5
19580 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
19581 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19582 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
19583 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19584 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19585 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
19586 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19587 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
19588 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19589 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19590 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
19591 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19592 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
19593 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19594 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm5
19595 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
19596 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19597 ; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
19598 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19599 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
19600 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19601 ; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
19602 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19603 ; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm1
19604 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm2
19605 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
19606 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19607 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
19608 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19609 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
19610 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19611 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
19612 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19613 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
19614 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
19615 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
19616 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19617 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
19618 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
19619 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
19620 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19621 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
19622 ; AVX512DQ-BW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19623 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm0
19624 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
19625 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19626 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
19627 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19628 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19629 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19630 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm2
19631 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
19632 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19633 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19634 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19635 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19636 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19637 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19638 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19639 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19640 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19641 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19642 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19643 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19644 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19645 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19646 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19647 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19648 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19649 ; AVX512DQ-BW-NEXT: vmovdqa64 1216(%rdi), %zmm1
19650 ; AVX512DQ-BW-NEXT: vmovdqa64 1152(%rdi), %zmm2
19651 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19652 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19653 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19654 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19655 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19656 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19657 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19658 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19659 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19660 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19661 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19662 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19663 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19664 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19665 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19666 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19667 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19668 ; AVX512DQ-BW-NEXT: vmovdqa64 832(%rdi), %zmm1
19669 ; AVX512DQ-BW-NEXT: vmovdqa64 768(%rdi), %zmm2
19670 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19671 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19672 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19673 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19674 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19675 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19676 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19677 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19678 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19679 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19680 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19681 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19682 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19683 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19684 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19685 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19686 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19687 ; AVX512DQ-BW-NEXT: vmovdqa64 1984(%rdi), %zmm1
19688 ; AVX512DQ-BW-NEXT: vmovdqa64 1920(%rdi), %zmm2
19689 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19690 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
19691 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19692 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19693 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19694 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19695 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19696 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
19697 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19698 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19699 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
19700 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19701 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm3
19702 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19703 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19704 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19705 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19706 ; AVX512DQ-BW-NEXT: vmovdqa64 1600(%rdi), %zmm1
19707 ; AVX512DQ-BW-NEXT: vmovdqa64 1536(%rdi), %zmm30
19708 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19709 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
19710 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19711 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19712 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
19713 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19714 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm2
19715 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
19716 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19717 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm2
19718 ; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
19719 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19720 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm2
19721 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
19722 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19723 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
19724 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm26
19725 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm18
19726 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm1
19727 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
19728 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19729 ; AVX512DQ-BW-NEXT: vmovdqa64 704(%rdi), %zmm21
19730 ; AVX512DQ-BW-NEXT: vmovdqa64 640(%rdi), %zmm13
19731 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm1
19732 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
19733 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19734 ; AVX512DQ-BW-NEXT: vmovdqa64 1088(%rdi), %zmm24
19735 ; AVX512DQ-BW-NEXT: vmovdqa64 1024(%rdi), %zmm19
19736 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm1
19737 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
19738 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19739 ; AVX512DQ-BW-NEXT: vmovdqa64 1472(%rdi), %zmm28
19740 ; AVX512DQ-BW-NEXT: vmovdqa64 1408(%rdi), %zmm16
19741 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm1
19742 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
19743 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19744 ; AVX512DQ-BW-NEXT: vmovdqa64 1856(%rdi), %zmm25
19745 ; AVX512DQ-BW-NEXT: vmovdqa64 1792(%rdi), %zmm7
19746 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1
19747 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
19748 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19749 ; AVX512DQ-BW-NEXT: vmovdqa64 2240(%rdi), %zmm23
19750 ; AVX512DQ-BW-NEXT: vmovdqa64 2176(%rdi), %zmm17
19751 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm1
19752 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
19753 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19754 ; AVX512DQ-BW-NEXT: vmovdqa64 2624(%rdi), %zmm20
19755 ; AVX512DQ-BW-NEXT: vmovdqa64 2560(%rdi), %zmm5
19756 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
19757 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
19758 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19759 ; AVX512DQ-BW-NEXT: vmovdqa64 2368(%rdi), %zmm1
19760 ; AVX512DQ-BW-NEXT: vmovdqa64 2304(%rdi), %zmm22
19761 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm29
19762 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
19763 ; AVX512DQ-BW-NEXT: vmovdqa64 3008(%rdi), %zmm14
19764 ; AVX512DQ-BW-NEXT: vmovdqa64 2944(%rdi), %zmm31
19765 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, %zmm2
19766 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
19767 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19768 ; AVX512DQ-BW-NEXT: vmovdqa64 2752(%rdi), %zmm2
19769 ; AVX512DQ-BW-NEXT: vmovdqa64 2688(%rdi), %zmm12
19770 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
19771 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm3
19772 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
19773 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19774 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
19775 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19776 ; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
19777 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19778 ; AVX512DQ-BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
19779 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm3
19780 ; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
19781 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
19782 ; AVX512DQ-BW-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
19783 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm3
19784 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
19785 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19786 ; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
19787 ; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
19788 ; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
19789 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
19790 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
19791 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19792 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm1
19793 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
19794 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19795 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm1
19796 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
19797 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19798 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm1
19799 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
19800 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19801 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm1
19802 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
19803 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19804 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm1
19805 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
19806 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19807 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1
19808 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
19809 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19810 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, %zmm1
19811 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
19812 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19813 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
19814 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
19815 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm0
19816 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
19817 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19818 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
19819 ; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19820 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm0
19821 ; AVX512DQ-BW-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
19822 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19823 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
19824 ; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
19825 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm0
19826 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
19827 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19828 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
19829 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19830 ; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
19831 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm6
19832 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
19833 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19834 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm6
19835 ; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
19836 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19837 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm6
19838 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
19839 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19840 ; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
19841 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm27
19842 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
19843 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm6
19844 ; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
19845 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19846 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm6
19847 ; AVX512DQ-BW-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
19848 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19849 ; AVX512DQ-BW-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
19850 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm18
19851 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
19852 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm6
19853 ; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
19854 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19855 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm6
19856 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
19857 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19858 ; AVX512DQ-BW-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
19859 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm15
19860 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
19861 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm19
19862 ; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
19863 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm6
19864 ; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
19865 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19866 ; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
19867 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm13
19868 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
19869 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm17
19870 ; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
19871 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm6
19872 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
19873 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19874 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
19875 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm6
19876 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
19877 ; AVX512DQ-BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
19878 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm7
19879 ; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
19880 ; AVX512DQ-BW-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
19881 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm16
19882 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
19883 ; AVX512DQ-BW-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
19884 ; AVX512DQ-BW-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
19885 ; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
19886 ; AVX512DQ-BW-NEXT: movb $56, %al
19887 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
19888 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19889 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19890 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19891 ; AVX512DQ-BW-NEXT: movb $-64, %al
19892 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
19893 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19894 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19895 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19896 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19897 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19898 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19899 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19900 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19901 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19902 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19903 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19904 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19905 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19906 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19907 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19908 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19909 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19910 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19911 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19912 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19913 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19914 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19915 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19916 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19917 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19918 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19919 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19920 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19921 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19922 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19923 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19924 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19925 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19926 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
19928 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19929 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
19930 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19931 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
19932 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19933 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
19934 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19935 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19936 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19937 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19938 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19939 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19940 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19941 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19942 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19943 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19944 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19945 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19946 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19947 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19948 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19949 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19950 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19951 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19952 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19953 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19954 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19955 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19956 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19957 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19958 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19959 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19960 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19961 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19962 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19963 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19964 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19965 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19966 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19967 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19968 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19969 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19970 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19971 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19972 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19973 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19974 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19975 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19976 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19977 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
19978 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19979 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
19980 ; AVX512DQ-BW-NEXT: movb $24, %al
19981 ; AVX512DQ-BW-NEXT: kmovd %eax, %k2
19982 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19983 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19984 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19985 ; AVX512DQ-BW-NEXT: movb $-32, %al
19986 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
19987 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
19988 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19989 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19990 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19991 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19992 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19993 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
19994 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19995 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19996 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19997 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
19998 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19999 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20000 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20001 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20002 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20003 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20004 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
20005 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20006 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20007 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20008 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20009 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
20010 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20011 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20012 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20013 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20014 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
20015 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20016 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20017 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20018 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20019 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
20020 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20021 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20022 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
20023 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
20024 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20025 ; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
20026 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20027 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
20028 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
20029 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20030 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20031 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20032 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20033 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20034 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20035 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20036 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20037 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20038 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20039 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20040 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20041 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20042 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20043 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20044 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20045 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20046 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20047 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20048 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20049 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20050 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20051 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20052 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20053 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20054 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
20055 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
20056 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
20057 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20058 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
20059 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
20060 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
20061 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20062 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
20063 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
20064 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20065 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
20066 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
20067 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20068 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
20069 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20070 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
20071 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20072 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
20073 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20074 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
20075 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20076 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
20077 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20078 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
20079 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20080 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
20081 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20082 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
20083 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20084 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
20085 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20086 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
20087 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20088 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
20089 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20090 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
20091 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20092 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
20093 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
20094 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20095 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
20096 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
20097 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20098 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
20099 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
20100 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
20101 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
20102 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
20103 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
20104 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
20105 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
20106 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
20107 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
20108 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
20109 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
20110 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
20111 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
20112 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
20113 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
20114 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
20115 ; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
20116 ; AVX512DQ-BW-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
20117 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
20118 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, 448(%rsi)
20119 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, 384(%rsi)
20120 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20121 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rsi)
20122 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20123 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rsi)
20124 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20125 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rsi)
20126 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20127 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rsi)
20128 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20129 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rsi)
20130 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20131 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rsi)
20132 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 448(%rdx)
20133 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20134 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rdx)
20135 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20136 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rdx)
20137 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20138 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rdx)
20139 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20140 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rdx)
20141 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20142 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rdx)
20143 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20144 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rdx)
20145 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20146 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%rdx)
20147 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 448(%rcx)
20148 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20149 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 256(%rcx)
20150 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20151 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 320(%rcx)
20152 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20153 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%rcx)
20154 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20155 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%rcx)
20156 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20157 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%rcx)
20158 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20159 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%rcx)
20160 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20161 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%rcx)
20162 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 448(%r8)
20163 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, 256(%r8)
20164 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 320(%r8)
20165 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20166 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 128(%r8)
20167 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20168 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 192(%r8)
20169 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20170 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, (%r8)
20171 ; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20172 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 64(%r8)
20173 ; AVX512DQ-BW-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
20174 ; AVX512DQ-BW-NEXT: vmovaps %zmm8, 384(%r8)
20175 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 448(%r9)
20176 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 256(%r9)
20177 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 320(%r9)
20178 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 128(%r9)
20179 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 192(%r9)
20180 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%r9)
20181 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 64(%r9)
20182 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 384(%r9)
20183 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
20184 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 384(%rax)
20185 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 448(%rax)
20186 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, 256(%rax)
20187 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 320(%rax)
20188 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 128(%rax)
20189 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 192(%rax)
20190 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, (%rax)
20191 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 64(%rax)
20192 ; AVX512DQ-BW-NEXT: addq $7240, %rsp # imm = 0x1C48
20193 ; AVX512DQ-BW-NEXT: vzeroupper
20194 ; AVX512DQ-BW-NEXT: retq
20196 ; AVX512DQ-BW-FCP-LABEL: load_i64_stride6_vf64:
20197 ; AVX512DQ-BW-FCP: # %bb.0:
20198 ; AVX512DQ-BW-FCP-NEXT: subq $7240, %rsp # imm = 0x1C48
20199 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2048(%rdi), %zmm3
20200 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1280(%rdi), %zmm4
20201 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1344(%rdi), %zmm0
20202 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 896(%rdi), %zmm5
20203 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 960(%rdi), %zmm26
20204 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm2
20205 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm1
20206 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm6
20207 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm29
20208 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,6,0,10,0,6,0,10]
20209 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
20210 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm8
20211 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm7, %zmm8
20212 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20213 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm8
20214 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm7, %zmm8
20215 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20216 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm8
20217 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm7, %zmm8
20218 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20219 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
20220 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm7, %zmm8
20221 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20222 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,7,0,11,1,7,0,11]
20223 ; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
20224 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9
20225 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm8, %zmm9
20226 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20227 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm9
20228 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm9
20229 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20230 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9
20231 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm8, %zmm9
20232 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20233 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm9
20234 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm8, %zmm9
20235 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20236 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [10,4,10,4,10,4,10,4]
20237 ; AVX512DQ-BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20238 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
20239 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm10
20240 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20241 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm10
20242 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm9, %zmm10
20243 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20244 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm10
20245 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
20246 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20247 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm10
20248 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm9, %zmm10
20249 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20250 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,5,11,5,11,5,11,5]
20251 ; AVX512DQ-BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20252 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11
20253 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm10, %zmm11
20254 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20255 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
20256 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm10, %zmm11
20257 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20258 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm11
20259 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm11
20260 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20261 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm11
20262 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm10, %zmm11
20263 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20264 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [12,0,0,6,12,0,0,6]
20265 ; AVX512DQ-BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
20266 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13
20267 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
20268 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm11, %zmm13
20269 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20270 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [13,0,1,7,13,0,1,7]
20271 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
20272 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm12
20273 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20274 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
20275 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm11, %zmm1
20276 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20277 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm6
20278 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20279 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
20280 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
20281 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20282 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm4
20283 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20284 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
20285 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm11, %zmm0
20286 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20287 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm5
20288 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20289 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2112(%rdi), %zmm0
20290 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20291 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
20292 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20293 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20294 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
20295 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20296 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20297 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
20298 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20299 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20300 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm1
20301 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20302 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1
20303 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm1
20304 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20305 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
20306 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20307 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1664(%rdi), %zmm1
20308 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1728(%rdi), %zmm0
20309 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
20310 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
20311 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20312 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3
20313 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20314 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20315 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20316 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm3
20317 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20318 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20319 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm3
20320 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20321 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20322 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm3
20323 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20324 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm1
20325 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20326 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2432(%rdi), %zmm3
20327 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2496(%rdi), %zmm0
20328 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1
20329 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
20330 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20331 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2816(%rdi), %zmm4
20332 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2880(%rdi), %zmm1
20333 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm7
20334 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20335 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5
20336 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm8, %zmm5
20337 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20338 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm1, %zmm8
20339 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20340 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20341 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
20342 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20343 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm9
20344 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20345 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20346 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm10, %zmm5
20347 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20348 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm10
20349 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20350 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm5
20351 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm11, %zmm5
20352 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20353 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm4, %zmm11
20354 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20355 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm4
20356 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20357 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm3
20358 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20359 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm1
20360 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm2
20361 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm8 = [0,6,12,0]
20362 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20363 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm0
20364 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20365 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [1,7,13,0]
20366 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20367 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
20368 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20369 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm10 = [10,0,6,0]
20370 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
20371 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm0
20372 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20373 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm11 = [11,1,7,0]
20374 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
20375 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
20376 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20377 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [4,10,4,10,4,10,4,10]
20378 ; AVX512DQ-BW-FCP-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20379 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm0
20380 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm0
20381 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20382 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [5,11,5,11,5,11,5,11]
20383 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
20384 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20385 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20386 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
20387 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
20388 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20389 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20390 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20391 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20392 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20393 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20394 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20395 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20396 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20397 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20398 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20399 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20400 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20401 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20402 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20403 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20404 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20405 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1216(%rdi), %zmm1
20406 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1152(%rdi), %zmm2
20407 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20408 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20409 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20410 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20411 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20412 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20413 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20414 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20415 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20416 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20417 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20418 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20419 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20420 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20421 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20422 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20423 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20424 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 832(%rdi), %zmm1
20425 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 768(%rdi), %zmm2
20426 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20427 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20428 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20429 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20430 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20431 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20432 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20433 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20434 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20435 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20436 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20437 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20438 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20439 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20440 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20441 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20442 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20443 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1984(%rdi), %zmm1
20444 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1920(%rdi), %zmm2
20445 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20446 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm3
20447 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20448 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20449 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20450 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20451 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20452 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
20453 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20454 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20455 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm11, %zmm3
20456 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20457 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm3
20458 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20459 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20460 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
20461 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20462 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1600(%rdi), %zmm1
20463 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1536(%rdi), %zmm30
20464 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20465 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
20466 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20467 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20468 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm2
20469 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20470 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
20471 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm10, %zmm2
20472 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20473 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
20474 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm11, %zmm2
20475 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20476 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm2
20477 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm2
20478 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20479 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm30
20480 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm26
20481 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm18
20482 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
20483 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
20484 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20485 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 704(%rdi), %zmm21
20486 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 640(%rdi), %zmm13
20487 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
20488 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
20489 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20490 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1088(%rdi), %zmm24
20491 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1024(%rdi), %zmm19
20492 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
20493 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
20494 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20495 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1472(%rdi), %zmm28
20496 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1408(%rdi), %zmm16
20497 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
20498 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
20499 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20500 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1856(%rdi), %zmm25
20501 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 1792(%rdi), %zmm7
20502 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
20503 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
20504 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20505 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2240(%rdi), %zmm23
20506 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2176(%rdi), %zmm17
20507 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
20508 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm4, %zmm1
20509 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20510 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2624(%rdi), %zmm20
20511 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2560(%rdi), %zmm5
20512 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
20513 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm4, %zmm1
20514 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20515 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2368(%rdi), %zmm1
20516 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2304(%rdi), %zmm22
20517 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm29
20518 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm8, %zmm29
20519 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 3008(%rdi), %zmm14
20520 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2944(%rdi), %zmm31
20521 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm2
20522 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm4, %zmm2
20523 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20524 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2752(%rdi), %zmm2
20525 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 2688(%rdi), %zmm12
20526 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm8
20527 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
20528 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
20529 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20530 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm9
20531 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20532 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm10, %zmm3
20533 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20534 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm10
20535 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm3
20536 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
20537 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
20538 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm12, %zmm2, %zmm11
20539 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm3
20540 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
20541 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20542 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm12, %zmm4
20543 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
20544 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm0, %zmm22
20545 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
20546 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm0, %zmm1
20547 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20548 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
20549 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm0, %zmm1
20550 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20551 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
20552 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm0, %zmm1
20553 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20554 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm1
20555 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm0, %zmm1
20556 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20557 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm1
20558 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm0, %zmm1
20559 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20560 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
20561 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm0, %zmm1
20562 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20563 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1
20564 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm0, %zmm1
20565 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20566 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm1
20567 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm1
20568 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20569 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,6,12,0,0,6,12]
20570 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
20571 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
20572 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
20573 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20574 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,7,13,0,1,7,13]
20575 ; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
20576 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm0
20577 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm21, %zmm2, %zmm0
20578 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20579 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,10,0,6,0,10,0,6]
20580 ; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
20581 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm0
20582 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm0
20583 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20584 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,11,1,7,0,11,1,7]
20585 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
20586 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm21
20587 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
20588 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm1, %zmm6
20589 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20590 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm6
20591 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm2, %zmm6
20592 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20593 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm6
20594 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm3, %zmm6
20595 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20596 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm26
20597 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm27
20598 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm1, %zmm27
20599 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm6
20600 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm6
20601 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20602 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm6
20603 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm16, %zmm3, %zmm6
20604 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20605 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm16, %zmm0, %zmm28
20606 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18
20607 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm1, %zmm18
20608 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm6
20609 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm6
20610 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20611 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm6
20612 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm3, %zmm6
20613 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20614 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm19, %zmm0, %zmm24
20615 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm15
20616 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm15
20617 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm19
20618 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm2, %zmm19
20619 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm6
20620 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
20621 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20622 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm0, %zmm23
20623 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
20624 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm13
20625 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
20626 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm2, %zmm17
20627 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm6
20628 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm3, %zmm6
20629 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20630 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm25
20631 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm6
20632 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm6
20633 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm1
20634 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm7
20635 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm2, %zmm7
20636 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm14, %zmm31, %zmm2
20637 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm16
20638 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm3, %zmm16
20639 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm31, %zmm14, %zmm3
20640 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm31, %zmm0, %zmm14
20641 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
20642 ; AVX512DQ-BW-FCP-NEXT: movb $56, %al
20643 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
20644 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20645 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20646 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20647 ; AVX512DQ-BW-FCP-NEXT: movb $-64, %al
20648 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
20649 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20650 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20651 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20652 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20653 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20654 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20655 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20656 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20657 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20658 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20659 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20660 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20661 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20662 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20663 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20664 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20665 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20666 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20667 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20668 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20669 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20670 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20671 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20672 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20673 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20674 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20675 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20676 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20677 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20678 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20679 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20680 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20681 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20682 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20683 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k1}
20684 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20685 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
20686 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20687 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
20688 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20689 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
20690 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20691 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20692 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20693 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20694 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20695 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20696 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20697 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20698 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20699 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20700 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20701 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20702 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20703 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20704 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20705 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20706 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20707 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20708 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20709 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20710 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20711 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20712 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20713 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20714 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20715 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20716 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20717 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20718 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20719 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20720 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20721 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20722 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20723 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20724 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20725 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20726 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20727 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20728 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20729 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20730 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20731 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20732 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20733 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
20734 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20735 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
20736 ; AVX512DQ-BW-FCP-NEXT: movb $24, %al
20737 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
20738 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20739 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20740 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20741 ; AVX512DQ-BW-FCP-NEXT: movb $-32, %al
20742 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
20743 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
20744 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20745 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20746 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20747 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20748 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20749 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20750 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20751 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20752 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20753 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20754 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20755 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20756 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20757 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20758 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20759 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20760 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm5 {%k1}
20761 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20762 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20763 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20764 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20765 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm5 {%k1}
20766 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20767 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20768 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20769 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20770 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm5 {%k1}
20771 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20772 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20773 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20774 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20775 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm5 {%k1}
20776 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20777 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20778 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
20779 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
20780 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20781 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
20782 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20783 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
20784 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
20785 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20786 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20787 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20788 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20789 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20790 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20791 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20792 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20793 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20794 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20795 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20796 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20797 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20798 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20799 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20800 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20801 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20802 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20803 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20804 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20805 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20806 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20807 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20808 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20809 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20810 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
20811 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
20812 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm31 {%k1}
20813 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20814 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
20815 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
20816 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm27 {%k1}
20817 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20818 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
20819 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
20820 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20821 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 16-byte Folded Reload
20822 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
20823 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20824 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 16-byte Folded Reload
20825 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20826 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm1 {%k1}
20827 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20828 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 16-byte Folded Reload
20829 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20830 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
20831 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20832 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 16-byte Folded Reload
20833 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20834 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm6 {%k1}
20835 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20836 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 16-byte Folded Reload
20837 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20838 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
20839 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20840 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm13 # 16-byte Folded Reload
20841 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20842 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
20843 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20844 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm15 # 16-byte Folded Reload
20845 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20846 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
20847 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20848 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm4, %zmm2, %zmm4
20849 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
20850 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20851 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 16-byte Folded Reload
20852 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm2 {%k1}
20853 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20854 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 16-byte Folded Reload
20855 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
20856 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
20857 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm16, %zmm16 # 16-byte Folded Reload
20858 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, %zmm16 {%k1}
20859 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
20860 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm17 # 16-byte Folded Reload
20861 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm17 {%k1}
20862 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
20863 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm18 # 16-byte Folded Reload
20864 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
20865 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
20866 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm30, %zmm19, %zmm19
20867 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm19 {%k1}
20868 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
20869 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm12, %zmm21, %zmm12
20870 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm12 {%k1}
20871 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
20872 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $0, %xmm22, %zmm14, %zmm14
20873 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm14 {%k1}
20874 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, 448(%rsi)
20875 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, 384(%rsi)
20876 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20877 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rsi)
20878 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20879 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rsi)
20880 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20881 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rsi)
20882 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20883 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rsi)
20884 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20885 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rsi)
20886 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20887 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rsi)
20888 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 448(%rdx)
20889 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20890 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rdx)
20891 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20892 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rdx)
20893 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20894 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rdx)
20895 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20896 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rdx)
20897 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20898 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rdx)
20899 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20900 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rdx)
20901 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20902 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%rdx)
20903 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 448(%rcx)
20904 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20905 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 256(%rcx)
20906 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20907 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 320(%rcx)
20908 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20909 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%rcx)
20910 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20911 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%rcx)
20912 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20913 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%rcx)
20914 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20915 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%rcx)
20916 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20917 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%rcx)
20918 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 448(%r8)
20919 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, 256(%r8)
20920 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 320(%r8)
20921 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20922 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 128(%r8)
20923 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20924 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 192(%r8)
20925 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20926 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, (%r8)
20927 ; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20928 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 64(%r8)
20929 ; AVX512DQ-BW-FCP-NEXT: vmovups (%rsp), %zmm8 # 64-byte Reload
20930 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm8, 384(%r8)
20931 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 448(%r9)
20932 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 256(%r9)
20933 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 320(%r9)
20934 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r9)
20935 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 192(%r9)
20936 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%r9)
20937 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 64(%r9)
20938 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 384(%r9)
20939 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
20940 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 384(%rax)
20941 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 448(%rax)
20942 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, 256(%rax)
20943 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 320(%rax)
20944 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 128(%rax)
20945 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 192(%rax)
20946 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
20947 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
20948 ; AVX512DQ-BW-FCP-NEXT: addq $7240, %rsp # imm = 0x1C48
20949 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
20950 ; AVX512DQ-BW-FCP-NEXT: retq
20951 %wide.vec = load <384 x i64>, ptr %in.vec, align 64
20952 %strided.vec0 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 0, i32 6, i32 12, i32 18, i32 24, i32 30, i32 36, i32 42, i32 48, i32 54, i32 60, i32 66, i32 72, i32 78, i32 84, i32 90, i32 96, i32 102, i32 108, i32 114, i32 120, i32 126, i32 132, i32 138, i32 144, i32 150, i32 156, i32 162, i32 168, i32 174, i32 180, i32 186, i32 192, i32 198, i32 204, i32 210, i32 216, i32 222, i32 228, i32 234, i32 240, i32 246, i32 252, i32 258, i32 264, i32 270, i32 276, i32 282, i32 288, i32 294, i32 300, i32 306, i32 312, i32 318, i32 324, i32 330, i32 336, i32 342, i32 348, i32 354, i32 360, i32 366, i32 372, i32 378>
20953 %strided.vec1 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 1, i32 7, i32 13, i32 19, i32 25, i32 31, i32 37, i32 43, i32 49, i32 55, i32 61, i32 67, i32 73, i32 79, i32 85, i32 91, i32 97, i32 103, i32 109, i32 115, i32 121, i32 127, i32 133, i32 139, i32 145, i32 151, i32 157, i32 163, i32 169, i32 175, i32 181, i32 187, i32 193, i32 199, i32 205, i32 211, i32 217, i32 223, i32 229, i32 235, i32 241, i32 247, i32 253, i32 259, i32 265, i32 271, i32 277, i32 283, i32 289, i32 295, i32 301, i32 307, i32 313, i32 319, i32 325, i32 331, i32 337, i32 343, i32 349, i32 355, i32 361, i32 367, i32 373, i32 379>
20954 %strided.vec2 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 2, i32 8, i32 14, i32 20, i32 26, i32 32, i32 38, i32 44, i32 50, i32 56, i32 62, i32 68, i32 74, i32 80, i32 86, i32 92, i32 98, i32 104, i32 110, i32 116, i32 122, i32 128, i32 134, i32 140, i32 146, i32 152, i32 158, i32 164, i32 170, i32 176, i32 182, i32 188, i32 194, i32 200, i32 206, i32 212, i32 218, i32 224, i32 230, i32 236, i32 242, i32 248, i32 254, i32 260, i32 266, i32 272, i32 278, i32 284, i32 290, i32 296, i32 302, i32 308, i32 314, i32 320, i32 326, i32 332, i32 338, i32 344, i32 350, i32 356, i32 362, i32 368, i32 374, i32 380>
20955 %strided.vec3 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 3, i32 9, i32 15, i32 21, i32 27, i32 33, i32 39, i32 45, i32 51, i32 57, i32 63, i32 69, i32 75, i32 81, i32 87, i32 93, i32 99, i32 105, i32 111, i32 117, i32 123, i32 129, i32 135, i32 141, i32 147, i32 153, i32 159, i32 165, i32 171, i32 177, i32 183, i32 189, i32 195, i32 201, i32 207, i32 213, i32 219, i32 225, i32 231, i32 237, i32 243, i32 249, i32 255, i32 261, i32 267, i32 273, i32 279, i32 285, i32 291, i32 297, i32 303, i32 309, i32 315, i32 321, i32 327, i32 333, i32 339, i32 345, i32 351, i32 357, i32 363, i32 369, i32 375, i32 381>
20956 %strided.vec4 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 4, i32 10, i32 16, i32 22, i32 28, i32 34, i32 40, i32 46, i32 52, i32 58, i32 64, i32 70, i32 76, i32 82, i32 88, i32 94, i32 100, i32 106, i32 112, i32 118, i32 124, i32 130, i32 136, i32 142, i32 148, i32 154, i32 160, i32 166, i32 172, i32 178, i32 184, i32 190, i32 196, i32 202, i32 208, i32 214, i32 220, i32 226, i32 232, i32 238, i32 244, i32 250, i32 256, i32 262, i32 268, i32 274, i32 280, i32 286, i32 292, i32 298, i32 304, i32 310, i32 316, i32 322, i32 328, i32 334, i32 340, i32 346, i32 352, i32 358, i32 364, i32 370, i32 376, i32 382>
20957 %strided.vec5 = shufflevector <384 x i64> %wide.vec, <384 x i64> poison, <64 x i32> <i32 5, i32 11, i32 17, i32 23, i32 29, i32 35, i32 41, i32 47, i32 53, i32 59, i32 65, i32 71, i32 77, i32 83, i32 89, i32 95, i32 101, i32 107, i32 113, i32 119, i32 125, i32 131, i32 137, i32 143, i32 149, i32 155, i32 161, i32 167, i32 173, i32 179, i32 185, i32 191, i32 197, i32 203, i32 209, i32 215, i32 221, i32 227, i32 233, i32 239, i32 245, i32 251, i32 257, i32 263, i32 269, i32 275, i32 281, i32 287, i32 293, i32 299, i32 305, i32 311, i32 317, i32 323, i32 329, i32 335, i32 341, i32 347, i32 353, i32 359, i32 365, i32 371, i32 377, i32 383>
20958 store <64 x i64> %strided.vec0, ptr %out.vec0, align 64
20959 store <64 x i64> %strided.vec1, ptr %out.vec1, align 64
20960 store <64 x i64> %strided.vec2, ptr %out.vec2, align 64
20961 store <64 x i64> %strided.vec3, ptr %out.vec3, align 64
20962 store <64 x i64> %strided.vec4, ptr %out.vec4, align 64
20963 store <64 x i64> %strided.vec5, ptr %out.vec5, align 64