; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP

; These patterns are produced by the LoopVectorizer for interleaved stores.
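;
; A minimal sketch (added for illustration, not part of the autogenerated
; checks; function and variable names are hypothetical): a C loop of roughly
; this shape is the kind of stride-6 interleaved store that the
; LoopVectorizer turns into the wide shuffle-and-store sequences below.
;
;   void store_stride6(long *out, const long *a, const long *b, const long *c,
;                      const long *d, const long *e, const long *f, int n) {
;     for (int i = 0; i < n; ++i) {
;       out[6*i+0] = a[i]; out[6*i+1] = b[i]; out[6*i+2] = c[i];
;       out[6*i+3] = d[i]; out[6*i+4] = e[i]; out[6*i+5] = f[i];
;     }
;   }
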
define void @store_i64_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps (%rdi), %xmm0
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps (%rdx), %xmm2
; SSE-NEXT: movaps (%rcx), %xmm3
; SSE-NEXT: movaps (%r8), %xmm4
; SSE-NEXT: movaps (%r9), %xmm5
; SSE-NEXT: movaps %xmm0, %xmm6
; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, %xmm7
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
; SSE-NEXT: movaps %xmm4, %xmm8
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm5[1]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: movaps %xmm2, 16(%rax)
; SSE-NEXT: movaps %xmm4, 32(%rax)
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps %xmm8, 80(%rax)
; SSE-NEXT: movaps %xmm7, 64(%rax)
; SSE-NEXT: movaps %xmm6, (%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i64_stride6_vf2:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovaps (%rdi), %xmm0
; AVX-NEXT: vmovaps (%rsi), %xmm1
; AVX-NEXT: vmovaps (%rdx), %xmm2
; AVX-NEXT: vmovaps (%rcx), %xmm3
; AVX-NEXT: vmovaps (%r8), %xmm4
; AVX-NEXT: vmovaps (%r9), %xmm5
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm6
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm7
; AVX-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[2],ymm6[2]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm1
; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[3],ymm1[3]
; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm1
; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
; AVX-NEXT: vmovaps %ymm1, 64(%rax)
; AVX-NEXT: vmovapd %ymm0, 32(%rax)
; AVX-NEXT: vmovaps %ymm6, (%rax)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf2:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovaps (%rdi), %xmm0
; AVX2-NEXT: vmovaps (%rdx), %xmm1
; AVX2-NEXT: vmovaps (%r8), %xmm2
; AVX2-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
; AVX2-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX2-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,1,3]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,1,3]
; AVX2-NEXT: vmovaps %ymm1, 64(%rax)
; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-NEXT: vmovaps %ymm3, (%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf2:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm0
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm1
; AVX2-FP-NEXT: vmovaps (%r8), %xmm2
; AVX2-FP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FP-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,1,3]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,1,3]
; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rax)
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: vmovaps %ymm3, (%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i64_stride6_vf2:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm0
; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm1
; AVX2-FCP-NEXT: vmovaps (%r8), %xmm2
; AVX2-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FCP-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm2
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,1,3]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,1,3]
; AVX2-FCP-NEXT: vmovaps %ymm1, 64(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm3, (%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf2:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-NEXT: vmovdqa (%r8), %xmm2
; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i64_stride6_vf2:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512-FCP-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i64_stride6_vf2:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i64_stride6_vf2:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-FCP-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride6_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512BW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i64_stride6_vf2:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512BW-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-FCP-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i64_stride6_vf2:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-BW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-BW-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf2:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm2 = [5,7,9,11]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
  %in.vec0 = load <2 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <2 x i64> %in.vec0, <2 x i64> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i64> %in.vec2, <2 x i64> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i64> %in.vec4, <2 x i64> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i64> %1, <4 x i64> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <4 x i64> %3, <4 x i64> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i64> %4, <8 x i64> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %interleaved.vec = shufflevector <12 x i64> %6, <12 x i64> poison, <12 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11>
  store <12 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps (%rdi), %xmm2
; SSE-NEXT: movaps 16(%rdi), %xmm1
; SSE-NEXT: movaps (%rsi), %xmm5
; SSE-NEXT: movaps 16(%rsi), %xmm6
; SSE-NEXT: movaps (%rdx), %xmm0
; SSE-NEXT: movaps 16(%rdx), %xmm4
; SSE-NEXT: movaps (%rcx), %xmm7
; SSE-NEXT: movaps 16(%rcx), %xmm8
; SSE-NEXT: movaps (%r8), %xmm9
; SSE-NEXT: movaps 16(%r8), %xmm10
; SSE-NEXT: movaps (%r9), %xmm11
; SSE-NEXT: movaps 16(%r9), %xmm12
; SSE-NEXT: movaps %xmm1, %xmm3
; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
; SSE-NEXT: movaps %xmm10, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm12[0]
; SSE-NEXT: movaps %xmm9, %xmm14
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm11[1]
; SSE-NEXT: movaps %xmm0, %xmm15
; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm7[1]
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
; SSE-NEXT: movaps %xmm2, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm5[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
; SSE-NEXT: movaps %xmm4, %xmm12
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm8[1]
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm8[0]
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm6[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; SSE-NEXT: movaps %xmm9, 32(%rax)
; SSE-NEXT: movaps %xmm2, 48(%rax)
; SSE-NEXT: movaps %xmm1, 96(%rax)
; SSE-NEXT: movaps %xmm4, 112(%rax)
; SSE-NEXT: movaps %xmm12, 160(%rax)
; SSE-NEXT: movaps %xmm10, 176(%rax)
; SSE-NEXT: movaps %xmm7, (%rax)
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps %xmm15, 64(%rax)
; SSE-NEXT: movaps %xmm14, 80(%rax)
; SSE-NEXT: movaps %xmm13, 128(%rax)
; SSE-NEXT: movaps %xmm3, 144(%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i64_stride6_vf4:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovapd (%rdi), %ymm0
; AVX-NEXT: vmovapd (%rsi), %ymm1
; AVX-NEXT: vmovaps (%rdx), %ymm2
; AVX-NEXT: vmovapd (%r8), %ymm3
; AVX-NEXT: vmovapd (%r9), %ymm4
; AVX-NEXT: vmovddup {{.*#+}} xmm5 = mem[0,0]
; AVX-NEXT: vmovaps (%rsi), %xmm6
; AVX-NEXT: vmovaps (%rdi), %xmm7
; AVX-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm7[1],xmm6[1]
; AVX-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
; AVX-NEXT: vblendpd {{.*#+}} ymm8 = ymm3[0,1],ymm8[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm5 = ymm8[0],ymm5[1],ymm8[2,3]
; AVX-NEXT: vmovaps (%rcx), %xmm8
; AVX-NEXT: vmovaps (%rdx), %xmm9
; AVX-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm9[1],xmm8[1]
; AVX-NEXT: vinsertf128 $1, (%r9), %ymm10, %ymm11
; AVX-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm12[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5],ymm11[6,7]
; AVX-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],mem[0],ymm2[2],mem[2]
; AVX-NEXT: vmovaps 16(%rdi), %xmm11
; AVX-NEXT: vunpcklpd {{.*#+}} xmm11 = xmm11[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm4[2,3],ymm1[2,3]
; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm3[2,3],ymm0[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[3]
; AVX-NEXT: vmovapd 16(%rdx), %xmm1
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],mem[1]
; AVX-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1,2],ymm4[3]
; AVX-NEXT: vmovlhps {{.*#+}} xmm3 = xmm7[0],xmm6[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm4 = xmm9[0],xmm8[0]
; AVX-NEXT: vmovaps %xmm4, 16(%rax)
; AVX-NEXT: vmovaps %xmm3, (%rax)
; AVX-NEXT: vmovapd %ymm0, 128(%rax)
; AVX-NEXT: vmovaps %ymm2, 96(%rax)
; AVX-NEXT: vmovaps %ymm10, 64(%rax)
; AVX-NEXT: vmovapd %ymm5, 32(%rax)
; AVX-NEXT: vmovapd %ymm1, 160(%rax)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf4:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovaps (%rdi), %ymm0
; AVX2-NEXT: vmovaps (%rsi), %ymm1
; AVX2-NEXT: vmovaps (%rdx), %ymm2
; AVX2-NEXT: vmovaps (%rcx), %ymm3
; AVX2-NEXT: vmovaps (%r8), %ymm4
; AVX2-NEXT: vmovaps (%r9), %xmm5
; AVX2-NEXT: vmovddup {{.*#+}} xmm6 = xmm5[0,0]
; AVX2-NEXT: vmovaps (%rsi), %xmm7
; AVX2-NEXT: vmovaps (%rdi), %xmm8
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm7[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm4[0,1],ymm9[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
; AVX2-NEXT: vmovaps (%rcx), %xmm9
; AVX2-NEXT: vmovaps (%rdx), %xmm10
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
; AVX2-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm11[0,1,2,3,4,5],ymm5[6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
; AVX2-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm8[2,3]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm3[2,3]
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm4[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-NEXT: vmovaps %ymm7, (%rax)
; AVX2-NEXT: vmovaps %ymm2, 160(%rax)
; AVX2-NEXT: vmovaps %ymm5, 64(%rax)
; AVX2-NEXT: vmovaps %ymm6, 32(%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf4:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovaps (%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps (%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps (%rdx), %ymm2
; AVX2-FP-NEXT: vmovaps (%rcx), %ymm3
; AVX2-FP-NEXT: vmovaps (%r8), %ymm4
; AVX2-FP-NEXT: vmovaps (%r9), %xmm5
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm6 = xmm5[0,0]
; AVX2-FP-NEXT: vmovaps (%rsi), %xmm7
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm8
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm7[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm4[0,1],ymm9[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
; AVX2-FP-NEXT: vmovaps (%rcx), %xmm9
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm10
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
; AVX2-FP-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm11[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
; AVX2-FP-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm8[2,3]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-FP-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm3[2,3]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm4[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FP-NEXT: vmovaps %ymm7, (%rax)
; AVX2-FP-NEXT: vmovaps %ymm2, 160(%rax)
; AVX2-FP-NEXT: vmovaps %ymm5, 64(%rax)
; AVX2-FP-NEXT: vmovaps %ymm6, 32(%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i64_stride6_vf4:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm0
; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm1
; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm2
; AVX2-FCP-NEXT: vmovaps (%rcx), %ymm3
; AVX2-FCP-NEXT: vmovaps (%r8), %ymm4
; AVX2-FCP-NEXT: vmovaps (%r9), %xmm5
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm6 = xmm5[0,0]
; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm7
; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm8
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm7[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm4[0,1],ymm9[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5,6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm9
; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm10
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
; AVX2-FCP-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm11[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm8[2,3]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-FCP-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm3[2,3]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm4[2,3],ymm0[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm7, (%rax)
; AVX2-FCP-NEXT: vmovaps %ymm2, 160(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm5, 64(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm6, 32(%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf4:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %ymm0
; AVX512-NEXT: vmovdqa (%rdx), %ymm1
; AVX512-NEXT: vmovdqa (%r8), %ymm2
; AVX512-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i64_stride6_vf4:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %ymm1
; AVX512-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512-FCP-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i64_stride6_vf4:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQ-NEXT: vmovdqa (%r8), %ymm2
; AVX512DQ-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512DQ-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512DQ-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i64_stride6_vf4:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride6_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
; AVX512BW-NEXT: vmovdqa (%r8), %ymm2
; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i64_stride6_vf4:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm0
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %ymm1
; AVX512BW-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512BW-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512BW-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512BW-FCP-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i64_stride6_vf4:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQ-BW-NEXT: vmovdqa (%r8), %ymm2
; AVX512DQ-BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512DQ-BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512DQ-BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf4:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,4,8,12,0,0,1,5]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [1,5,0,0,10,14,2,6]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
  %in.vec0 = load <4 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <4 x i64> %in.vec0, <4 x i64> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i64> %in.vec2, <4 x i64> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i64> %in.vec4, <4 x i64> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i64> %1, <8 x i64> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <8 x i64> %3, <8 x i64> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <16 x i64> %4, <16 x i64> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
  %interleaved.vec = shufflevector <24 x i64> %6, <24 x i64> poison, <24 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23>
  store <24 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride6_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf8:
; SSE: # %bb.0:
; SSE-NEXT: subq $24, %rsp
; SSE-NEXT: movaps (%rdi), %xmm2
; SSE-NEXT: movaps 16(%rdi), %xmm3
; SSE-NEXT: movaps 32(%rdi), %xmm5
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps 16(%rsi), %xmm12
; SSE-NEXT: movaps 32(%rsi), %xmm14
; SSE-NEXT: movaps (%rdx), %xmm4
; SSE-NEXT: movaps 16(%rdx), %xmm6
; SSE-NEXT: movaps 32(%rdx), %xmm0
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps (%rcx), %xmm10
; SSE-NEXT: movaps 16(%rcx), %xmm13
; SSE-NEXT: movaps (%r8), %xmm7
; SSE-NEXT: movaps 16(%r8), %xmm9
; SSE-NEXT: movaps (%r9), %xmm11
; SSE-NEXT: movaps 16(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm1[0]
; SSE-NEXT: movaps %xmm8, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm4, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm10[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm10[1]
; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm7, %xmm10
; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm11[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm11[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm3, %xmm11
; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm12[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm12[1]
; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm6, %xmm12
; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm13[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm13[1]
; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm5, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm14[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm14[1]
; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: movaps %xmm1, %xmm14
; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm5
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 48(%rdi), %xmm6
; SSE-NEXT: movaps 48(%rsi), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 48(%rdx), %xmm1
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps 48(%r8), %xmm0
; SSE-NEXT: movaps 48(%r9), %xmm3
; SSE-NEXT: movaps %xmm0, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm0, 368(%rax)
; SSE-NEXT: movaps %xmm1, 352(%rax)
; SSE-NEXT: movaps %xmm6, 336(%rax)
; SSE-NEXT: movaps %xmm2, 320(%rax)
; SSE-NEXT: movaps %xmm4, 304(%rax)
; SSE-NEXT: movaps %xmm7, 288(%rax)
; SSE-NEXT: movaps %xmm5, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps %xmm8, 224(%rax)
; SSE-NEXT: movaps %xmm14, 208(%rax)
; SSE-NEXT: movaps %xmm15, 192(%rax)
; SSE-NEXT: movaps %xmm9, 176(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps %xmm13, 128(%rax)
; SSE-NEXT: movaps %xmm12, 112(%rax)
; SSE-NEXT: movaps %xmm11, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps %xmm10, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $24, %rsp
; SSE-NEXT: retq
;
; AVX-LABEL: store_i64_stride6_vf8:
; AVX: # %bb.0:
; AVX-NEXT: vmovapd 32(%rdi), %ymm13
; AVX-NEXT: vmovapd (%r8), %ymm11
; AVX-NEXT: vmovapd 32(%r8), %ymm14
; AVX-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
; AVX-NEXT: vmovaps (%rsi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rsi), %xmm4
; AVX-NEXT: vmovaps (%rdi), %xmm3
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm1[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps (%rcx), %xmm5
; AVX-NEXT: vmovaps 32(%rcx), %xmm6
; AVX-NEXT: vmovaps 32(%rdx), %xmm8
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm6[1]
; AVX-NEXT: vinsertf128 $1, 32(%r9), %ymm2, %ymm7
; AVX-NEXT: vbroadcastsd 40(%r8), %ymm9
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm7[2,3],ymm2[4,5],ymm7[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps (%rdx), %xmm9
; AVX-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm9[1],xmm5[1]
; AVX-NEXT: vinsertf128 $1, (%r9), %ymm7, %ymm10
; AVX-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm12[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm10[2,3],ymm7[4,5],ymm10[6,7]
; AVX-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
; AVX-NEXT: vmovaps 32(%rdi), %xmm12
; AVX-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm12[1],xmm4[1]
; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
; AVX-NEXT: vblendpd {{.*#+}} ymm15 = ymm14[0,1],ymm15[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm10 = ymm15[0],ymm10[1],ymm15[2,3]
; AVX-NEXT: vmovapd 32(%rsi), %ymm15
; AVX-NEXT: vunpckhpd {{.*#+}} ymm13 = ymm13[1],ymm15[1],ymm13[3],ymm15[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm14[2,3],ymm13[2,3]
; AVX-NEXT: vmovapd 32(%r9), %ymm0
; AVX-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm15[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[2],ymm14[3]
; AVX-NEXT: vmovapd (%rdi), %ymm14
; AVX-NEXT: vmovapd (%rsi), %ymm15
; AVX-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm14[1],ymm15[1],ymm14[3],ymm15[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm14[2,3]
; AVX-NEXT: vmovapd (%r9), %ymm1
; AVX-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm1[2,3],ymm15[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm14 = ymm11[0],ymm14[0],ymm11[2],ymm14[3]
; AVX-NEXT: vmovapd 48(%rdx), %xmm11
; AVX-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],mem[1]
; AVX-NEXT: vbroadcastsd 56(%r8), %ymm15
; AVX-NEXT: vblendpd {{.*#+}} ymm11 = ymm11[0,1],ymm15[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm11 = ymm11[0,1,2],ymm0[3]
; AVX-NEXT: vmovaps 48(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 48(%rcx), %ymm15
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm15[6,7]
; AVX-NEXT: vmovapd 16(%rdx), %xmm15
; AVX-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm15[1],mem[1]
; AVX-NEXT: vbroadcastsd 24(%r8), %ymm2
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm15[0,1],ymm2[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3]
; AVX-NEXT: vmovaps 16(%rdi), %xmm2
; AVX-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 16(%rcx), %ymm15
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm15[6,7]
; AVX-NEXT: vmovlhps {{.*#+}} xmm4 = xmm12[0],xmm4[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm6 = xmm8[0],xmm6[0]
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
; AVX-NEXT: # xmm3 = xmm3[0],mem[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm5 = xmm9[0],xmm5[0]
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovaps %xmm5, 16(%rax)
; AVX-NEXT: vmovaps %xmm3, (%rax)
; AVX-NEXT: vmovaps %xmm6, 208(%rax)
; AVX-NEXT: vmovaps %xmm4, 192(%rax)
; AVX-NEXT: vmovapd %ymm14, 128(%rax)
; AVX-NEXT: vmovapd %ymm13, 320(%rax)
; AVX-NEXT: vmovaps %ymm2, 96(%rax)
; AVX-NEXT: vmovapd %ymm1, 160(%rax)
; AVX-NEXT: vmovapd %ymm10, 224(%rax)
; AVX-NEXT: vmovaps %ymm0, 288(%rax)
; AVX-NEXT: vmovaps %ymm7, 64(%rax)
; AVX-NEXT: vmovapd %ymm11, 352(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 256(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 32(%rax)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf8:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovaps (%r8), %ymm1
; AVX2-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-NEXT: vmovaps (%r9), %xmm3
; AVX2-NEXT: vmovaps 32(%r9), %xmm5
; AVX2-NEXT: vmovddup {{.*#+}} xmm0 = xmm3[0,0]
; AVX2-NEXT: vmovaps (%rsi), %xmm9
; AVX2-NEXT: vmovaps 32(%rsi), %xmm7
; AVX2-NEXT: vmovaps (%rdi), %xmm6
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm9[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm1[0,1],ymm2[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm2
; AVX2-NEXT: vmovaps (%rcx), %xmm11
; AVX2-NEXT: vmovaps 32(%rcx), %xmm12
; AVX2-NEXT: vmovaps (%rdx), %xmm13
; AVX2-NEXT: vmovaps 32(%rdx), %xmm10
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm12[1]
; AVX2-NEXT: vbroadcastsd 40(%r8), %ymm14
; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5],ymm2[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm13[1],xmm11[1]
; AVX2-NEXT: vbroadcastsd 8(%r8), %ymm14
; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-NEXT: vmovaps 32(%rdi), %xmm14
; AVX2-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm3[6,7]
; AVX2-NEXT: vmovddup {{.*#+}} xmm5 = xmm5[0,0]
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm7[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm4[0,1],ymm8[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3],ymm8[4,5,6,7]
; AVX2-NEXT: vmovaps (%rdi), %ymm8
; AVX2-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
; AVX2-NEXT: vmovaps 32(%rdi), %ymm11
; AVX2-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
; AVX2-NEXT: vmovaps (%rsi), %ymm13
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[2],ymm9[2]
; AVX2-NEXT: vmovaps 32(%rsi), %ymm9
; AVX2-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
; AVX2-NEXT: vmovaps 32(%rdx), %ymm12
; AVX2-NEXT: vinsertf128 $1, %xmm10, %ymm14, %ymm10
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm10[2,3],ymm14[2,3]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm14[2,3]
; AVX2-NEXT: vbroadcastsd 48(%r9), %ymm14
; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3],ymm4[4,5,6,7]
; AVX2-NEXT: vmovaps (%rdx), %ymm14
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm12[2,3]
; AVX2-NEXT: vbroadcastsd 48(%rcx), %ymm11
; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm11[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm14[1],mem[1],ymm14[3],mem[3]
; AVX2-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm8[1],ymm13[1],ymm8[3],ymm13[3]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm12[2,3]
; AVX2-NEXT: vbroadcastsd 16(%r9), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm12[2,3],ymm1[4,5,6,7]
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm8[0],ymm13[0],ymm8[2],ymm13[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm14[2,3]
; AVX2-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm12[6,7]
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-NEXT: vmovaps %ymm7, 192(%rax)
; AVX2-NEXT: vmovaps %ymm11, 160(%rax)
; AVX2-NEXT: vmovaps %ymm9, 288(%rax)
; AVX2-NEXT: vmovaps %ymm4, 320(%rax)
; AVX2-NEXT: vmovaps %ymm10, 352(%rax)
; AVX2-NEXT: vmovaps %ymm6, (%rax)
; AVX2-NEXT: vmovaps %ymm5, 224(%rax)
; AVX2-NEXT: vmovaps %ymm3, 64(%rax)
; AVX2-NEXT: vmovaps %ymm2, 256(%rax)
; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf8:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: vmovaps (%r8), %ymm1
; AVX2-FP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FP-NEXT: vmovaps (%r9), %xmm3
; AVX2-FP-NEXT: vmovaps 32(%r9), %xmm5
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm0 = xmm3[0,0]
; AVX2-FP-NEXT: vmovaps (%rsi), %xmm9
; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm7
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm6
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm9[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm1[0,1],ymm2[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm2
; AVX2-FP-NEXT: vmovaps (%rcx), %xmm11
; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm12
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm13
; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm10
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm12[1]
; AVX2-FP-NEXT: vbroadcastsd 40(%r8), %ymm14
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5],ymm2[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm13[1],xmm11[1]
; AVX2-FP-NEXT: vbroadcastsd 8(%r8), %ymm14
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm14
; AVX2-FP-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm3[6,7]
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm5 = xmm5[0,0]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm7[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm4[0,1],ymm8[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3],ymm8[4,5,6,7]
; AVX2-FP-NEXT: vmovaps (%rdi), %ymm8
; AVX2-FP-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm11
; AVX2-FP-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
; AVX2-FP-NEXT: vmovaps (%rsi), %ymm13
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[2],ymm9[2]
; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm9
; AVX2-FP-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm12
; AVX2-FP-NEXT: vinsertf128 $1, %xmm10, %ymm14, %ymm10
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm10[2,3],ymm14[2,3]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm14[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%r9), %ymm14
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3],ymm4[4,5,6,7]
; AVX2-FP-NEXT: vmovaps (%rdx), %ymm14
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm12[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%rcx), %ymm11
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm11[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm14[1],mem[1],ymm14[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm8[1],ymm13[1],ymm8[3],ymm13[3]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm12[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%r9), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm12[2,3],ymm1[4,5,6,7]
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm8[0],ymm13[0],ymm8[2],ymm13[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm14[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm12[6,7]
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-FP-NEXT: vmovaps %ymm7, 192(%rax)
; AVX2-FP-NEXT: vmovaps %ymm11, 160(%rax)
; AVX2-FP-NEXT: vmovaps %ymm9, 288(%rax)
; AVX2-FP-NEXT: vmovaps %ymm4, 320(%rax)
; AVX2-FP-NEXT: vmovaps %ymm10, 352(%rax)
; AVX2-FP-NEXT: vmovaps %ymm6, (%rax)
; AVX2-FP-NEXT: vmovaps %ymm5, 224(%rax)
; AVX2-FP-NEXT: vmovaps %ymm3, 64(%rax)
; AVX2-FP-NEXT: vmovaps %ymm2, 256(%rax)
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i64_stride6_vf8:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: vmovaps (%r8), %ymm1
; AVX2-FCP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FCP-NEXT: vmovaps (%r9), %xmm3
; AVX2-FCP-NEXT: vmovaps 32(%r9), %xmm5
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm0 = xmm3[0,0]
; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm9
; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm7
; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm6
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm9[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm1[0,1],ymm2[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm2
; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm11
; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm12
; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm13
; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm10
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm12[1]
; AVX2-FCP-NEXT: vbroadcastsd 40(%r8), %ymm14
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5],ymm2[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm13[1],xmm11[1]
; AVX2-FCP-NEXT: vbroadcastsd 8(%r8), %ymm14
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm14
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm3[6,7]
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm5 = xmm5[0,0]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm7[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm4[0,1],ymm8[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3],ymm8[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm8
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm11
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm13
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[2],ymm9[2]
; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm9
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm12
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm10, %ymm14, %ymm10
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm10[2,3],ymm14[2,3]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm14[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 48(%r9), %ymm14
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3],ymm4[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm14
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm12[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 48(%rcx), %ymm11
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm11[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm14[1],mem[1],ymm14[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm8[1],ymm13[1],ymm8[3],ymm13[3]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm12[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 16(%r9), %ymm12
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm12[2,3],ymm1[4,5,6,7]
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm8[0],ymm13[0],ymm8[2],ymm13[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm14[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm12[6,7]
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm7, 192(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm11, 160(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm9, 288(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm4, 320(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm10, 352(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm6, (%rax)
; AVX2-FCP-NEXT: vmovaps %ymm5, 224(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm3, 64(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm2, 256(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf8:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512-NEXT: movb $12, %r10b
; AVX512-NEXT: kmovw %r10d, %k1
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512-NEXT: movb $16, %r10b
; AVX512-NEXT: kmovw %r10d, %k2
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512-NEXT: movb $48, %r9b
; AVX512-NEXT: kmovw %r9d, %k2
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512-NEXT: vmovdqa (%rdx), %xmm2
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512-NEXT: vmovdqa (%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i64_stride6_vf8:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512-FCP-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512-FCP-NEXT: movb $12, %r10b
; AVX512-FCP-NEXT: kmovw %r10d, %k1
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512-FCP-NEXT: movb $16, %r10b
; AVX512-FCP-NEXT: kmovw %r10d, %k2
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512-FCP-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512-FCP-NEXT: movb $48, %r9b
; AVX512-FCP-NEXT: kmovw %r9d, %k2
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm2
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i64_stride6_vf8:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512DQ-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512DQ-NEXT: movb $12, %r10b
; AVX512DQ-NEXT: kmovw %r10d, %k1
; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512DQ-NEXT: movb $16, %r10b
; AVX512DQ-NEXT: kmovw %r10d, %k2
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512DQ-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512DQ-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512DQ-NEXT: movb $48, %r9b
; AVX512DQ-NEXT: kmovw %r9d, %k2
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512DQ-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512DQ-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512DQ-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512DQ-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm2
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512DQ-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512DQ-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i64_stride6_vf8:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512DQ-FCP-NEXT: movb $12, %r10b
; AVX512DQ-FCP-NEXT: kmovw %r10d, %k1
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512DQ-FCP-NEXT: movb $16, %r10b
; AVX512DQ-FCP-NEXT: kmovw %r10d, %k2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512DQ-FCP-NEXT: movb $48, %r9b
; AVX512DQ-FCP-NEXT: kmovw %r9d, %k2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512DQ-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512DQ-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm2
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride6_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512BW-NEXT: movb $12, %r10b
; AVX512BW-NEXT: kmovd %r10d, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512BW-NEXT: movb $16, %r10b
; AVX512BW-NEXT: kmovd %r10d, %k2
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512BW-NEXT: movb $48, %r9b
; AVX512BW-NEXT: kmovd %r9d, %k2
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm2
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512BW-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i64_stride6_vf8:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512BW-FCP-NEXT: movb $12, %r10b
; AVX512BW-FCP-NEXT: kmovd %r10d, %k1
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512BW-FCP-NEXT: movb $16, %r10b
; AVX512BW-FCP-NEXT: kmovd %r10d, %k2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512BW-FCP-NEXT: movb $48, %r9b
; AVX512BW-FCP-NEXT: kmovd %r9d, %k2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm2
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i64_stride6_vf8:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512DQ-BW-NEXT: movb $12, %r10b
; AVX512DQ-BW-NEXT: kmovd %r10d, %k1
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512DQ-BW-NEXT: movb $16, %r10b
; AVX512DQ-BW-NEXT: kmovd %r10d, %k2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512DQ-BW-NEXT: movb $48, %r9b
; AVX512DQ-BW-NEXT: kmovd %r9d, %k2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm2
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512DQ-BW-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf8:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm4 = [0,0,4,12]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm4
; AVX512DQ-BW-FCP-NEXT: movb $12, %r10b
; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
; AVX512DQ-BW-FCP-NEXT: movb $16, %r10b
; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %zmm4
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm8
; AVX512DQ-BW-FCP-NEXT: movb $48, %r9b
; AVX512DQ-BW-FCP-NEXT: kmovd %r9d, %k2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm8, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm7 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm9, %zmm7
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9 {%k2}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm8 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm9, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm8, %zmm9
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-FCP-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm6, %zmm5, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm8, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm8[0,1,2,3],zmm5[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm5, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm5
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm2
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm6 {%k1}
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm6, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm3 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm1, %zmm2, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm6, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm0, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, 64(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
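; The IR below first concatenates the six 8 x i64 inputs into one <48 x i64>
; value, then applies a single stride-6 interleaving shuffle: result element
; 6*i+j is taken from element i of input vector j.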
  %in.vec0 = load <8 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <8 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <8 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <8 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <8 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <8 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <8 x i64> %in.vec0, <8 x i64> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %2 = shufflevector <8 x i64> %in.vec2, <8 x i64> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %3 = shufflevector <8 x i64> %in.vec4, <8 x i64> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %4 = shufflevector <16 x i64> %1, <16 x i64> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %5 = shufflevector <16 x i64> %3, <16 x i64> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <32 x i64> %4, <32 x i64> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
  %interleaved.vec = shufflevector <48 x i64> %6, <48 x i64> poison, <48 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47>
  store <48 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

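; vf16 repeats the same stride-6 interleave with 16 i64 elements per input
; vector, so 96 values (768 bytes) are written to %out.vec.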
define void @store_i64_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf16:
; SSE: # %bb.0:
1919 ; SSE-NEXT: subq $408, %rsp # imm = 0x198
1920 ; SSE-NEXT: movaps (%rdi), %xmm7
1921 ; SSE-NEXT: movaps 16(%rdi), %xmm8
1922 ; SSE-NEXT: movaps 32(%rdi), %xmm9
1923 ; SSE-NEXT: movaps (%rsi), %xmm3
1924 ; SSE-NEXT: movaps 16(%rsi), %xmm1
1925 ; SSE-NEXT: movaps 32(%rsi), %xmm0
1926 ; SSE-NEXT: movaps (%rdx), %xmm10
1927 ; SSE-NEXT: movaps 16(%rdx), %xmm11
1928 ; SSE-NEXT: movaps 32(%rdx), %xmm12
1929 ; SSE-NEXT: movaps (%rcx), %xmm5
1930 ; SSE-NEXT: movaps 16(%rcx), %xmm2
1931 ; SSE-NEXT: movaps (%r8), %xmm13
1932 ; SSE-NEXT: movaps 16(%r8), %xmm15
1933 ; SSE-NEXT: movaps (%r9), %xmm6
1934 ; SSE-NEXT: movaps 16(%r9), %xmm4
1935 ; SSE-NEXT: movaps %xmm7, %xmm14
1936 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm3[0]
1937 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1938 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
1939 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1940 ; SSE-NEXT: movaps %xmm10, %xmm3
1941 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm5[0]
1942 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1943 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm5[1]
1944 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1945 ; SSE-NEXT: movaps %xmm13, %xmm5
1946 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm6[0]
1947 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1948 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
1949 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1950 ; SSE-NEXT: movaps %xmm8, %xmm3
1951 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm1[0]
1952 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1953 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
1954 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1955 ; SSE-NEXT: movaps %xmm11, %xmm1
1956 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1957 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1958 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm2[1]
1959 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1960 ; SSE-NEXT: movaps %xmm15, %xmm1
1961 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
1962 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1963 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm4[1]
1964 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1965 ; SSE-NEXT: movaps %xmm9, %xmm1
1966 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1967 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1968 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
1969 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1970 ; SSE-NEXT: movaps 32(%rcx), %xmm0
1971 ; SSE-NEXT: movaps %xmm12, %xmm1
1972 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1973 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1974 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
1975 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1976 ; SSE-NEXT: movaps 32(%r8), %xmm2
1977 ; SSE-NEXT: movaps 32(%r9), %xmm0
1978 ; SSE-NEXT: movaps %xmm2, %xmm1
1979 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1980 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1981 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1982 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1983 ; SSE-NEXT: movaps 48(%rdi), %xmm2
1984 ; SSE-NEXT: movaps 48(%rsi), %xmm0
1985 ; SSE-NEXT: movaps %xmm2, %xmm1
1986 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1987 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1988 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1989 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1990 ; SSE-NEXT: movaps 48(%rdx), %xmm2
1991 ; SSE-NEXT: movaps 48(%rcx), %xmm0
1992 ; SSE-NEXT: movaps %xmm2, %xmm1
1993 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1994 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1995 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1996 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1997 ; SSE-NEXT: movaps 48(%r8), %xmm2
1998 ; SSE-NEXT: movaps 48(%r9), %xmm0
1999 ; SSE-NEXT: movaps %xmm2, %xmm1
2000 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2001 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2002 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
2003 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2004 ; SSE-NEXT: movaps 64(%rdi), %xmm2
2005 ; SSE-NEXT: movaps 64(%rsi), %xmm0
2006 ; SSE-NEXT: movaps %xmm2, %xmm1
2007 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2008 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
2009 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
2010 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2011 ; SSE-NEXT: movaps 64(%rdx), %xmm2
2012 ; SSE-NEXT: movaps 64(%rcx), %xmm0
2013 ; SSE-NEXT: movaps %xmm2, %xmm1
2014 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2015 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2016 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
2017 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2018 ; SSE-NEXT: movaps 64(%r8), %xmm2
2019 ; SSE-NEXT: movaps 64(%r9), %xmm0
2020 ; SSE-NEXT: movaps %xmm2, %xmm1
2021 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2022 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2023 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
2024 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2025 ; SSE-NEXT: movaps 80(%rdi), %xmm15
2026 ; SSE-NEXT: movaps 80(%rsi), %xmm0
2027 ; SSE-NEXT: movaps %xmm15, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
; SSE-NEXT: movaps 80(%rdx), %xmm12
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps 80(%r8), %xmm14
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
; SSE-NEXT: movaps 96(%rdi), %xmm9
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm9, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps 96(%rdx), %xmm10
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm10, %xmm11
; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
; SSE-NEXT: movaps 96(%r8), %xmm5
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 112(%rdi), %xmm6
; SSE-NEXT: movaps 112(%rsi), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 112(%rdx), %xmm1
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps 112(%r8), %xmm0
; SSE-NEXT: movaps 112(%r9), %xmm3
; SSE-NEXT: movaps %xmm0, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm0, 752(%rax)
; SSE-NEXT: movaps %xmm1, 736(%rax)
; SSE-NEXT: movaps %xmm6, 720(%rax)
; SSE-NEXT: movaps %xmm2, 704(%rax)
; SSE-NEXT: movaps %xmm4, 688(%rax)
; SSE-NEXT: movaps %xmm7, 672(%rax)
; SSE-NEXT: movaps %xmm5, 656(%rax)
; SSE-NEXT: movaps %xmm10, 640(%rax)
; SSE-NEXT: movaps %xmm9, 624(%rax)
; SSE-NEXT: movaps %xmm8, 608(%rax)
; SSE-NEXT: movaps %xmm11, 592(%rax)
; SSE-NEXT: movaps %xmm13, 576(%rax)
; SSE-NEXT: movaps %xmm14, 560(%rax)
; SSE-NEXT: movaps %xmm12, 544(%rax)
; SSE-NEXT: movaps %xmm15, 528(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 512(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 496(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 480(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 464(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 448(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 432(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 416(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 400(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 384(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 368(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 352(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 336(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 320(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 304(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 288(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 224(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 208(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 192(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 176(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 128(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 112(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $408, %rsp # imm = 0x198
; SSE-NEXT: retq
;
; AVX-LABEL: store_i64_stride6_vf16:
; AVX: # %bb.0:
; AVX-NEXT: subq $440, %rsp # imm = 0x1B8
; AVX-NEXT: vmovapd (%r8), %ymm11
; AVX-NEXT: vmovapd 32(%r8), %ymm0
; AVX-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX-NEXT: vmovaps (%rsi), %xmm3
; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rsi), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 64(%rsi), %xmm5
; AVX-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps (%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps (%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rcx), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps (%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, (%r9), %ymm1, %ymm2
; AVX-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX-NEXT: vmovaps 32(%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 32(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
; AVX-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 32(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rdi), %xmm15
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm15[1],xmm5[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 64(%r8), %ymm5
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rcx), %xmm10
; AVX-NEXT: vmovaps 64(%rdx), %xmm9
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm10[1]
; AVX-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 64(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rsi), %xmm8
; AVX-NEXT: vmovaps 96(%rdi), %xmm7
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm7[1],xmm8[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm2
; AVX-NEXT: vmovapd 96(%r8), %ymm1
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm1[0,1],ymm2[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rcx), %xmm6
; AVX-NEXT: vmovaps 96(%rdx), %xmm3
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm6[1]
; AVX-NEXT: vbroadcastsd 104(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 96(%r9), %ymm2, %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd (%rdi), %ymm2
; AVX-NEXT: vmovapd (%rsi), %ymm12
; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm11[2,3],ymm2[2,3]
; AVX-NEXT: vmovapd (%r9), %ymm4
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm4[2,3],ymm12[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm11[0],ymm2[2],ymm11[3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 32(%rdi), %ymm2
; AVX-NEXT: vmovapd 32(%rsi), %ymm11
; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm11[1],ymm2[3],ymm11[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm2[2,3]
; AVX-NEXT: vmovapd 32(%r9), %ymm2
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm2[2,3],ymm11[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 64(%rdi), %ymm0
; AVX-NEXT: vmovapd 64(%rsi), %ymm11
; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm11[1],ymm0[3],ymm11[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm0[2,3]
; AVX-NEXT: vmovapd 64(%r9), %ymm0
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm0[2,3],ymm11[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm11[0],ymm5[2],ymm11[3]
; AVX-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 96(%rdi), %ymm5
; AVX-NEXT: vmovapd 96(%rsi), %ymm11
; AVX-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm11[1],ymm5[3],ymm11[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm1[2,3],ymm5[2,3]
; AVX-NEXT: vmovapd 96(%r9), %ymm1
; AVX-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm1[2,3],ymm11[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm11 = ymm5[0],ymm11[0],ymm5[2],ymm11[3]
; AVX-NEXT: vmovaps 16(%rdi), %xmm5
; AVX-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm12[6,7]
; AVX-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 16(%rdx), %xmm5
; AVX-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],mem[1]
; AVX-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX-NEXT: vblendpd {{.*#+}} ymm5 = ymm5[0,1],ymm12[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3]
; AVX-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 48(%rdi), %xmm4
; AVX-NEXT: vunpcklpd {{.*#+}} xmm4 = xmm4[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 48(%rcx), %ymm5
; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX-NEXT: vmovapd 48(%rdx), %xmm4
; AVX-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],mem[1]
; AVX-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0,1,2],ymm2[3]
; AVX-NEXT: vmovaps 80(%rdi), %xmm2
; AVX-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 80(%rcx), %ymm14
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm14[6,7]
; AVX-NEXT: vmovapd 80(%rdx), %xmm14
; AVX-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm14[1],mem[1]
; AVX-NEXT: vbroadcastsd 88(%r8), %ymm13
; AVX-NEXT: vblendpd {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm13[0,1,2],ymm0[3]
; AVX-NEXT: vmovaps 112(%rdi), %xmm13
; AVX-NEXT: vunpcklpd {{.*#+}} xmm13 = xmm13[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 112(%rcx), %ymm14
; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
; AVX-NEXT: vmovapd 112(%rdx), %xmm14
; AVX-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm14[1],mem[1]
; AVX-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX-NEXT: vblendpd {{.*#+}} ymm12 = ymm14[0,1],ymm12[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm12[0,1,2],ymm1[3]
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm12 # 16-byte Folded Reload
; AVX-NEXT: # xmm12 = xmm15[0],mem[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm9 = xmm9[0],xmm10[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10, %xmm10 # 16-byte Folded Reload
; AVX-NEXT: # xmm10 = xmm10[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm7 = xmm7[0],xmm8[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
; AVX-NEXT: vmovaps (%rsp), %xmm6 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm6 # 16-byte Folded Reload
; AVX-NEXT: # xmm6 = xmm6[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
; AVX-NEXT: # xmm8 = xmm8[0],mem[0]
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovaps %xmm8, 16(%rax)
; AVX-NEXT: vmovaps %xmm6, (%rax)
; AVX-NEXT: vmovaps %xmm3, 592(%rax)
; AVX-NEXT: vmovaps %xmm7, 576(%rax)
; AVX-NEXT: vmovaps %xmm14, 208(%rax)
; AVX-NEXT: vmovaps %xmm10, 192(%rax)
; AVX-NEXT: vmovaps %xmm9, 400(%rax)
; AVX-NEXT: vmovaps %xmm12, 384(%rax)
; AVX-NEXT: vmovapd %ymm11, 704(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm3, 512(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm3, 320(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm3, 128(%rax)
; AVX-NEXT: vmovapd %ymm1, 736(%rax)
; AVX-NEXT: vmovaps %ymm13, 672(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm1, 640(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm1, 608(%rax)
; AVX-NEXT: vmovapd %ymm0, 544(%rax)
; AVX-NEXT: vmovaps %ymm2, 480(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 448(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 416(%rax)
; AVX-NEXT: vmovapd %ymm4, 352(%rax)
; AVX-NEXT: vmovaps %ymm5, 288(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 256(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 224(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 160(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 96(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 64(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 32(%rax)
; AVX-NEXT: addq $440, %rsp # imm = 0x1B8
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf16:
; AVX2: # %bb.0:
; AVX2-NEXT: subq $360, %rsp # imm = 0x168
; AVX2-NEXT: vmovaps (%r8), %ymm5
; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%r9), %xmm1
; AVX2-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vmovaps (%rsi), %xmm3
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%rdi), %xmm15
; AVX2-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm15[1],xmm3[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vmovaps (%rcx), %xmm9
; AVX2-NEXT: vmovaps 32(%rcx), %xmm13
; AVX2-NEXT: vmovaps (%rdx), %xmm8
; AVX2-NEXT: vmovaps 32(%rdx), %xmm11
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm9[1]
; AVX2-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm13[1]
; AVX2-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rsi), %xmm6
; AVX2-NEXT: vmovaps 64(%rdi), %xmm7
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rcx), %xmm4
; AVX2-NEXT: vmovaps 64(%rdx), %xmm5
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-NEXT: vbroadcastsd 72(%r8), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm12[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%r8), %ymm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-NEXT: vmovaps 96(%rdi), %xmm3
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm3[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[0,1],ymm10[0,1]
; AVX2-NEXT: vmovaps 96(%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm12 = xmm0[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm10[0,1],ymm12[2,3],ymm10[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rcx), %xmm10
; AVX2-NEXT: vmovaps 96(%rdx), %xmm12
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm12[1],xmm10[1]
; AVX2-NEXT: vbroadcastsd 104(%r8), %ymm14
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm8, %ymm15, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm0
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, %xmm11, %ymm1, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vinsertf128 $1, %xmm10, %ymm2, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm12, %ymm3, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovaps (%rdi), %ymm1
; AVX2-NEXT: vmovaps (%rsi), %ymm2
; AVX2-NEXT: vmovaps (%rdx), %ymm3
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm3[2,3]
; AVX2-NEXT: vbroadcastsd 16(%rcx), %ymm5
; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-NEXT: vbroadcastsd 16(%r9), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX2-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm2[2,3],ymm3[2,3]
; AVX2-NEXT: vmovaps 32(%rdi), %ymm3
; AVX2-NEXT: vmovaps 32(%rsi), %ymm5
; AVX2-NEXT: vmovaps 32(%rdx), %ymm6
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm3[0],ymm5[0],ymm3[2],ymm5[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm6[2,3]
; AVX2-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm7[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
; AVX2-NEXT: # ymm3 = mem[2,3],ymm3[2,3]
; AVX2-NEXT: vbroadcastsd 48(%r9), %ymm5
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX2-NEXT: vbroadcastsd 56(%r8), %ymm6
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm5[2,3],ymm6[2,3]
; AVX2-NEXT: vmovaps 64(%rdi), %ymm7
; AVX2-NEXT: vmovaps 64(%rsi), %ymm9
; AVX2-NEXT: vmovaps 64(%rdx), %ymm10
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm7[0],ymm9[0],ymm7[2],ymm9[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm10[2,3]
; AVX2-NEXT: vbroadcastsd 80(%rcx), %ymm11
; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm11[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm9[1],ymm7[3],ymm9[3]
; AVX2-NEXT: vperm2f128 $19, (%rsp), %ymm7, %ymm7 # 32-byte Folded Reload
; AVX2-NEXT: # ymm7 = mem[2,3],ymm7[2,3]
; AVX2-NEXT: vbroadcastsd 80(%r9), %ymm9
; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,3],ymm7[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm10[1],mem[1],ymm10[3],mem[3]
; AVX2-NEXT: vbroadcastsd 88(%r8), %ymm10
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm10[2,3]
; AVX2-NEXT: vmovaps 96(%rdi), %ymm10
; AVX2-NEXT: vmovaps 96(%rsi), %ymm11
; AVX2-NEXT: vmovaps 96(%rdx), %ymm12
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm12[2,3]
; AVX2-NEXT: vbroadcastsd 112(%rcx), %ymm13
; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm13[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
; AVX2-NEXT: # ymm10 = mem[2,3],ymm10[2,3]
; AVX2-NEXT: vbroadcastsd 112(%r9), %ymm11
; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vmovaps %ymm11, 736(%rax)
; AVX2-NEXT: vmovaps %ymm10, 704(%rax)
; AVX2-NEXT: vmovaps %ymm8, 672(%rax)
; AVX2-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-NEXT: vmovaps %ymm9, 544(%rax)
; AVX2-NEXT: vmovaps %ymm7, 512(%rax)
; AVX2-NEXT: vmovaps %ymm5, 480(%rax)
; AVX2-NEXT: vmovaps %ymm14, 384(%rax)
; AVX2-NEXT: vmovaps %ymm6, 352(%rax)
; AVX2-NEXT: vmovaps %ymm3, 320(%rax)
; AVX2-NEXT: vmovaps %ymm2, 288(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-NEXT: vmovaps %ymm4, 160(%rax)
; AVX2-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-NEXT: vmovaps %ymm15, 96(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, (%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-NEXT: addq $360, %rsp # imm = 0x168
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf16:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: subq $360, %rsp # imm = 0x168
; AVX2-FP-NEXT: vmovaps (%r8), %ymm5
; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%r9), %xmm1
; AVX2-FP-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vmovaps (%rsi), %xmm3
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm15
; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm15[1],xmm3[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vmovaps (%rcx), %xmm9
; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm13
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm8
; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm11
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm9[1]
; AVX2-FP-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm13[1]
; AVX2-FP-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-FP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rsi), %xmm6
; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm7
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rcx), %xmm4
; AVX2-FP-NEXT: vmovaps 64(%rdx), %xmm5
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-FP-NEXT: vbroadcastsd 72(%r8), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm12[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%r8), %ymm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-FP-NEXT: vmovaps 96(%rdi), %xmm3
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm3[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[0,1],ymm10[0,1]
; AVX2-FP-NEXT: vmovaps 96(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm12 = xmm0[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm10[0,1],ymm12[2,3],ymm10[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rcx), %xmm10
; AVX2-FP-NEXT: vmovaps 96(%rdx), %xmm12
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm12[1],xmm10[1]
; AVX2-FP-NEXT: vbroadcastsd 104(%r8), %ymm14
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm8, %ymm15, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm0
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, %xmm11, %ymm1, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm10, %ymm2, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm12, %ymm3, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovaps (%rdi), %ymm1
; AVX2-FP-NEXT: vmovaps (%rsi), %ymm2
; AVX2-FP-NEXT: vmovaps (%rdx), %ymm3
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm3[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%rcx), %ymm5
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%r9), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm2[2,3],ymm3[2,3]
; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm3
; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm5
; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm6
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm3[0],ymm5[0],ymm3[2],ymm5[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm6[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm7[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm3 = mem[2,3],ymm3[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%r9), %ymm5
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 56(%r8), %ymm6
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm5[2,3],ymm6[2,3]
; AVX2-FP-NEXT: vmovaps 64(%rdi), %ymm7
; AVX2-FP-NEXT: vmovaps 64(%rsi), %ymm9
; AVX2-FP-NEXT: vmovaps 64(%rdx), %ymm10
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm7[0],ymm9[0],ymm7[2],ymm9[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm10[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%rcx), %ymm11
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm11[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm9[1],ymm7[3],ymm9[3]
; AVX2-FP-NEXT: vperm2f128 $19, (%rsp), %ymm7, %ymm7 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm7 = mem[2,3],ymm7[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%r9), %ymm9
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,3],ymm7[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm10[1],mem[1],ymm10[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 88(%r8), %ymm10
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm10[2,3]
; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm10
; AVX2-FP-NEXT: vmovaps 96(%rsi), %ymm11
; AVX2-FP-NEXT: vmovaps 96(%rdx), %ymm12
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm12[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%rcx), %ymm13
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm13[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm10 = mem[2,3],ymm10[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%r9), %ymm11
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovaps %ymm11, 736(%rax)
; AVX2-FP-NEXT: vmovaps %ymm10, 704(%rax)
; AVX2-FP-NEXT: vmovaps %ymm8, 672(%rax)
; AVX2-FP-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-FP-NEXT: vmovaps %ymm9, 544(%rax)
; AVX2-FP-NEXT: vmovaps %ymm7, 512(%rax)
; AVX2-FP-NEXT: vmovaps %ymm5, 480(%rax)
; AVX2-FP-NEXT: vmovaps %ymm14, 384(%rax)
; AVX2-FP-NEXT: vmovaps %ymm6, 352(%rax)
; AVX2-FP-NEXT: vmovaps %ymm3, 320(%rax)
; AVX2-FP-NEXT: vmovaps %ymm2, 288(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-FP-NEXT: vmovaps %ymm4, 160(%rax)
; AVX2-FP-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-FP-NEXT: vmovaps %ymm15, 96(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: addq $360, %rsp # imm = 0x168
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i64_stride6_vf16:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: subq $360, %rsp # imm = 0x168
; AVX2-FCP-NEXT: vmovaps (%r8), %ymm5
; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps (%r9), %xmm1
; AVX2-FCP-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm3
; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm15
; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm15[1],xmm3[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm9
; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm13
; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm8
; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm11
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm9[1]
; AVX2-FCP-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm13[1]
; AVX2-FCP-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-FCP-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rsi), %xmm6
; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm7
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-FCP-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rcx), %xmm4
; AVX2-FCP-NEXT: vmovaps 64(%rdx), %xmm5
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-FCP-NEXT: vbroadcastsd 72(%r8), %ymm12
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm12[4,5,6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%r8), %ymm0
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-FCP-NEXT: vmovaps 96(%rdi), %xmm3
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm3[1],xmm2[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[0,1],ymm10[0,1]
; AVX2-FCP-NEXT: vmovaps 96(%r9), %xmm0
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm12 = xmm0[0,0]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm10[0,1],ymm12[2,3],ymm10[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%rcx), %xmm10
; AVX2-FCP-NEXT: vmovaps 96(%rdx), %xmm12
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm12[1],xmm10[1]
; AVX2-FCP-NEXT: vbroadcastsd 104(%r8), %ymm14
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm8, %ymm15, %ymm1
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm0
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm11, %ymm1, %ymm1
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm0
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm1
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm10, %ymm2, %ymm0
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm12, %ymm3, %ymm1
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm1
; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm2
; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm3
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm3[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 16(%rcx), %ymm5
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 16(%r9), %ymm2
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm2[2,3],ymm3[2,3]
; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm3
; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm5
; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm6
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm3[0],ymm5[0],ymm3[2],ymm5[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm6[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm7[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm3 = mem[2,3],ymm3[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 48(%r9), %ymm5
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5,6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 56(%r8), %ymm6
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm5[2,3],ymm6[2,3]
; AVX2-FCP-NEXT: vmovaps 64(%rdi), %ymm7
; AVX2-FCP-NEXT: vmovaps 64(%rsi), %ymm9
; AVX2-FCP-NEXT: vmovaps 64(%rdx), %ymm10
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm7[0],ymm9[0],ymm7[2],ymm9[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm10[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 80(%rcx), %ymm11
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm11[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm9[1],ymm7[3],ymm9[3]
; AVX2-FCP-NEXT: vperm2f128 $19, (%rsp), %ymm7, %ymm7 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm7 = mem[2,3],ymm7[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 80(%r9), %ymm9
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,3],ymm7[4,5,6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm10[1],mem[1],ymm10[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 88(%r8), %ymm10
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm10[2,3]
; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm10
; AVX2-FCP-NEXT: vmovaps 96(%rsi), %ymm11
; AVX2-FCP-NEXT: vmovaps 96(%rdx), %ymm12
; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm12[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 112(%rcx), %ymm13
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm13[6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm10 = mem[2,3],ymm10[2,3]
; AVX2-FCP-NEXT: vbroadcastsd 112(%r9), %ymm11
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5,6,7]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-FCP-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-FCP-NEXT: vmovaps %ymm11, 736(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm10, 704(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm8, 672(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm9, 544(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm7, 512(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm5, 480(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm14, 384(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm6, 352(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm3, 320(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm2, 288(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm4, 160(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm15, 96(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FCP-NEXT: addq $360, %rsp # imm = 0x168
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf16:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm8
; AVX512-NEXT: vmovdqa64 (%rdi), %zmm12
; AVX512-NEXT: vmovdqa64 (%rsi), %zmm15
; AVX512-NEXT: vmovdqa64 64(%rsi), %zmm11
; AVX512-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512-NEXT: vmovdqa64 64(%rcx), %zmm7
; AVX512-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512-NEXT: vmovdqa64 64(%r8), %zmm6
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm12, %zmm1
; AVX512-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512-NEXT: movb $12, %r10b
; AVX512-NEXT: kmovw %r10d, %k1
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512-NEXT: movb $16, %r10b
; AVX512-NEXT: kmovw %r10d, %k2
; AVX512-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
; AVX512-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
; AVX512-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512-NEXT: vmovdqa64 64(%r9), %zmm14
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
; AVX512-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm9
; AVX512-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm13
; AVX512-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
; AVX512-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
; AVX512-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
; AVX512-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm13
; AVX512-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
; AVX512-NEXT: movb $48, %r9b
; AVX512-NEXT: kmovw %r9d, %k2
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
; AVX512-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm26
; AVX512-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
; AVX512-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm17
; AVX512-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
; AVX512-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm28
; AVX512-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
; AVX512-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
; AVX512-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
; AVX512-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
; AVX512-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
; AVX512-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
; AVX512-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
; AVX512-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
; AVX512-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
; AVX512-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
; AVX512-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
; AVX512-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
; AVX512-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
; AVX512-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
; AVX512-NEXT: vmovdqa (%rdx), %xmm15
; AVX512-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
; AVX512-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
; AVX512-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
; AVX512-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
; AVX512-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
; AVX512-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
; AVX512-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
; AVX512-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
; AVX512-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
; AVX512-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
; AVX512-NEXT: vmovdqa (%rdi), %ymm7
; AVX512-NEXT: vmovdqa64 64(%rdi), %ymm20
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
; AVX512-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
; AVX512-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
; AVX512-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
; AVX512-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
; AVX512-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
; AVX512-NEXT: vmovdqa64 %zmm18, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512-NEXT: vmovdqa64 %zmm16, 256(%rax)
; AVX512-NEXT: vmovdqa64 %zmm19, 320(%rax)
; AVX512-NEXT: vmovdqa64 %zmm17, 448(%rax)
; AVX512-NEXT: vmovdqa64 %zmm5, 512(%rax)
; AVX512-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512-NEXT: vmovdqa64 %zmm13, 640(%rax)
; AVX512-NEXT: vmovdqa64 %zmm9, 704(%rax)
; AVX512-NEXT: vmovdqa64 %zmm8, 384(%rax)
; AVX512-NEXT: vmovdqa64 %zmm12, (%rax)
3103 ; AVX512-NEXT: vzeroupper
3104 ; AVX512-NEXT: retq
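; The *-FCP variants below (fast variable cross-lane/per-lane shuffle tuning)
; currently select the same instruction sequence as their base configurations.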
3106 ; AVX512-FCP-LABEL: store_i64_stride6_vf16:
3107 ; AVX512-FCP: # %bb.0:
3108 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3109 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm8
3110 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm12
3111 ; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm15
3112 ; AVX512-FCP-NEXT: vmovdqa64 64(%rsi), %zmm11
3113 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
3114 ; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
3115 ; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm4
3116 ; AVX512-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
3117 ; AVX512-FCP-NEXT: vmovdqa64 (%r8), %zmm3
3118 ; AVX512-FCP-NEXT: vmovdqa64 64(%r8), %zmm6
3119 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3120 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3121 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm1
3122 ; AVX512-FCP-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3123 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3124 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
3125 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3126 ; AVX512-FCP-NEXT: movb $12, %r10b
3127 ; AVX512-FCP-NEXT: kmovw %r10d, %k1
3128 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3129 ; AVX512-FCP-NEXT: movb $16, %r10b
3130 ; AVX512-FCP-NEXT: kmovw %r10d, %k2
3131 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
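; k1 = 0b00001100 and k2 = 0b00010000: the first masked move merges qword
; lanes 2-3 from the rdx/rcx permute, the second merges lane 4 from the r8
; vector.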
3132 ; AVX512-FCP-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3133 ; AVX512-FCP-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3134 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3135 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3136 ; AVX512-FCP-NEXT: vmovdqa64 (%r9), %zmm10
3137 ; AVX512-FCP-NEXT: vmovdqa64 64(%r9), %zmm14
3138 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3139 ; AVX512-FCP-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3140 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm9
3141 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3142 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm13
3143 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3144 ; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3145 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3146 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3147 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3148 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3149 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3150 ; AVX512-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3151 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm17
3152 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3153 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3154 ; AVX512-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3155 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm13
3156 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3157 ; AVX512-FCP-NEXT: movb $48, %r9b
3158 ; AVX512-FCP-NEXT: kmovw %r9d, %k2
3159 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3160 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3161 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3162 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3163 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3164 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3165 ; AVX512-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3166 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm26
3167 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3168 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3169 ; AVX512-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3170 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm17
3171 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3172 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3173 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3174 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3175 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3176 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3177 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm28
3178 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3179 ; AVX512-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3180 ; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3181 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3182 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3183 ; AVX512-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3184 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3185 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3186 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3187 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3188 ; AVX512-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3189 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3190 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3191 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3192 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3193 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3194 ; AVX512-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3195 ; AVX512-FCP-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3196 ; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm15
3197 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %xmm21
3198 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3199 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3200 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3201 ; AVX512-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3202 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3203 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3204 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3205 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3206 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3207 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3208 ; AVX512-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3209 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3210 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3211 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3212 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3213 ; AVX512-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3214 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3215 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm7
3216 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %ymm20
3217 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3218 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3219 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3220 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3221 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3222 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3223 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3224 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3225 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3226 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3227 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3228 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3229 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 64(%rax)
3230 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 128(%rax)
3231 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 192(%rax)
3232 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 256(%rax)
3233 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, 320(%rax)
3234 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 448(%rax)
3235 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 512(%rax)
3236 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 576(%rax)
3237 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 640(%rax)
3238 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 704(%rax)
3239 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, 384(%rax)
3240 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, (%rax)
3241 ; AVX512-FCP-NEXT: vzeroupper
3242 ; AVX512-FCP-NEXT: retq
3244 ; AVX512DQ-LABEL: store_i64_stride6_vf16:
3245 ; AVX512DQ: # %bb.0:
3246 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
3247 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm8
3248 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm12
3249 ; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm15
3250 ; AVX512DQ-NEXT: vmovdqa64 64(%rsi), %zmm11
3251 ; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %zmm5
3252 ; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm2
3253 ; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm4
3254 ; AVX512DQ-NEXT: vmovdqa64 64(%rcx), %zmm7
3255 ; AVX512DQ-NEXT: vmovdqa64 (%r8), %zmm3
3256 ; AVX512DQ-NEXT: vmovdqa64 64(%r8), %zmm6
3257 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3258 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3259 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm1
3260 ; AVX512DQ-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3261 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3262 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm10
3263 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3264 ; AVX512DQ-NEXT: movb $12, %r10b
3265 ; AVX512DQ-NEXT: kmovw %r10d, %k1
3266 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3267 ; AVX512DQ-NEXT: movb $16, %r10b
3268 ; AVX512DQ-NEXT: kmovw %r10d, %k2
3269 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3270 ; AVX512DQ-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3271 ; AVX512DQ-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3272 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3273 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3274 ; AVX512DQ-NEXT: vmovdqa64 (%r9), %zmm10
3275 ; AVX512DQ-NEXT: vmovdqa64 64(%r9), %zmm14
3276 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3277 ; AVX512DQ-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3278 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm9
3279 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3280 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm13
3281 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3282 ; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3283 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3284 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3285 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3286 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3287 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3288 ; AVX512DQ-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3289 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm17
3290 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3291 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3292 ; AVX512DQ-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3293 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm13
3294 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3295 ; AVX512DQ-NEXT: movb $48, %r9b
3296 ; AVX512DQ-NEXT: kmovw %r9d, %k2
3297 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3298 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3299 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3300 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3301 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3302 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3303 ; AVX512DQ-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3304 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm26
3305 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3306 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3307 ; AVX512DQ-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3308 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm17
3309 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3310 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3311 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3312 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3313 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3314 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3315 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm28
3316 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3317 ; AVX512DQ-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3318 ; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3319 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3320 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3321 ; AVX512DQ-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3322 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3323 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3324 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3325 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3326 ; AVX512DQ-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3327 ; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3328 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3329 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3330 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3331 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3332 ; AVX512DQ-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3333 ; AVX512DQ-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3334 ; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm15
3335 ; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %xmm21
3336 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3337 ; AVX512DQ-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3338 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3339 ; AVX512DQ-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3340 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3341 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3342 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3343 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3344 ; AVX512DQ-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3345 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3346 ; AVX512DQ-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3347 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3348 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3349 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3350 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3351 ; AVX512DQ-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3352 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3353 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm7
3354 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %ymm20
3355 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3356 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3357 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3358 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3359 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3360 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3361 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3362 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3363 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3364 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3365 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3366 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3367 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 64(%rax)
3368 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 128(%rax)
3369 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 192(%rax)
3370 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 256(%rax)
3371 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 320(%rax)
3372 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 448(%rax)
3373 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 512(%rax)
3374 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 576(%rax)
3375 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 640(%rax)
3376 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 704(%rax)
3377 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 384(%rax)
3378 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, (%rax)
3379 ; AVX512DQ-NEXT: vzeroupper
3380 ; AVX512DQ-NEXT: retq
3382 ; AVX512DQ-FCP-LABEL: store_i64_stride6_vf16:
3383 ; AVX512DQ-FCP: # %bb.0:
3384 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3385 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm8
3386 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm12
3387 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm15
3388 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rsi), %zmm11
3389 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
3390 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
3391 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm4
3392 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
3393 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%r8), %zmm3
3394 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r8), %zmm6
3395 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3396 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3397 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm1
3398 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3399 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3400 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
3401 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3402 ; AVX512DQ-FCP-NEXT: movb $12, %r10b
3403 ; AVX512DQ-FCP-NEXT: kmovw %r10d, %k1
3404 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3405 ; AVX512DQ-FCP-NEXT: movb $16, %r10b
3406 ; AVX512DQ-FCP-NEXT: kmovw %r10d, %k2
3407 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3408 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3409 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3410 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3411 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3412 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%r9), %zmm10
3413 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r9), %zmm14
3414 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3415 ; AVX512DQ-FCP-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3416 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm9
3417 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3418 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm13
3419 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3420 ; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3421 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3422 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3423 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3424 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3425 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3426 ; AVX512DQ-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3427 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm17
3428 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3429 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3430 ; AVX512DQ-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3431 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm13
3432 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3433 ; AVX512DQ-FCP-NEXT: movb $48, %r9b
3434 ; AVX512DQ-FCP-NEXT: kmovw %r9d, %k2
3435 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3436 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3437 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3438 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3439 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3440 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3441 ; AVX512DQ-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3442 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm26
3443 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3444 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3445 ; AVX512DQ-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3446 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm17
3447 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3448 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3449 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3450 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3451 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3452 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3453 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm28
3454 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3455 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3456 ; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3457 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3458 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3459 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3460 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3461 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3462 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3463 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3464 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3465 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3466 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3467 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3468 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3469 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3470 ; AVX512DQ-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3471 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3472 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm15
3473 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %xmm21
3474 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3475 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3476 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3477 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3478 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3479 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3480 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3481 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3482 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3483 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3484 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3485 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3486 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3487 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3488 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3489 ; AVX512DQ-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3490 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3491 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm7
3492 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %ymm20
3493 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3494 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3495 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3496 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3497 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3498 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3499 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3500 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3501 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3502 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3503 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3504 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3505 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 64(%rax)
3506 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 128(%rax)
3507 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 192(%rax)
3508 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 256(%rax)
3509 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, 320(%rax)
3510 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 448(%rax)
3511 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 512(%rax)
3512 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 576(%rax)
3513 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 640(%rax)
3514 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 704(%rax)
3515 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, 384(%rax)
3516 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, (%rax)
3517 ; AVX512DQ-FCP-NEXT: vzeroupper
3518 ; AVX512DQ-FCP-NEXT: retq
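; The AVX512BW-based configurations below match the non-BW codegen except that
; the mask moves use kmovd instead of kmovw.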
3520 ; AVX512BW-LABEL: store_i64_stride6_vf16:
3521 ; AVX512BW: # %bb.0:
3522 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3523 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm8
3524 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm12
3525 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm15
3526 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm11
3527 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm5
3528 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
3529 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm4
3530 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm7
3531 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm3
3532 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm6
3533 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3534 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3535 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm1
3536 ; AVX512BW-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3537 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3538 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm10
3539 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3540 ; AVX512BW-NEXT: movb $12, %r10b
3541 ; AVX512BW-NEXT: kmovd %r10d, %k1
3542 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3543 ; AVX512BW-NEXT: movb $16, %r10b
3544 ; AVX512BW-NEXT: kmovd %r10d, %k2
3545 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3546 ; AVX512BW-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3547 ; AVX512BW-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3548 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3549 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3550 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm10
3551 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm14
3552 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3553 ; AVX512BW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3554 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm9
3555 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3556 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm13
3557 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3558 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3559 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3560 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3561 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3562 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3563 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3564 ; AVX512BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3565 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm17
3566 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3567 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3568 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3569 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm13
3570 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3571 ; AVX512BW-NEXT: movb $48, %r9b
3572 ; AVX512BW-NEXT: kmovd %r9d, %k2
3573 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3574 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3575 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3576 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3577 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3578 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3579 ; AVX512BW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3580 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm26
3581 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3582 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3583 ; AVX512BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3584 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm17
3585 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3586 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3587 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3588 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3589 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3590 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3591 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm28
3592 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3593 ; AVX512BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3594 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3595 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3596 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3597 ; AVX512BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3598 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3599 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3600 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3601 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3602 ; AVX512BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3603 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3604 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3605 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3606 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3607 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3608 ; AVX512BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3609 ; AVX512BW-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3610 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm15
3611 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %xmm21
3612 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3613 ; AVX512BW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3614 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3615 ; AVX512BW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3616 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3617 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3618 ; AVX512BW-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3619 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3620 ; AVX512BW-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3621 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3622 ; AVX512BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3623 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3624 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3625 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3626 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3627 ; AVX512BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3628 ; AVX512BW-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3629 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm7
3630 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %ymm20
3631 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3632 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3633 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3634 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3635 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3636 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3637 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3638 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3639 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3640 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3641 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3642 ; AVX512BW-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3643 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%rax)
3644 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rax)
3645 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 192(%rax)
3646 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 256(%rax)
3647 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 320(%rax)
3648 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 448(%rax)
3649 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 512(%rax)
3650 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 576(%rax)
3651 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 640(%rax)
3652 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 704(%rax)
3653 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 384(%rax)
3654 ; AVX512BW-NEXT: vmovdqa64 %zmm12, (%rax)
3655 ; AVX512BW-NEXT: vzeroupper
3656 ; AVX512BW-NEXT: retq
3658 ; AVX512BW-FCP-LABEL: store_i64_stride6_vf16:
3659 ; AVX512BW-FCP: # %bb.0:
3660 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3661 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm8
3662 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm12
3663 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm15
3664 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm11
3665 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
3666 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
3667 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm4
3668 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
3669 ; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %zmm3
3670 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm6
3671 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3672 ; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3673 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm1
3674 ; AVX512BW-FCP-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3675 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3676 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
3677 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3678 ; AVX512BW-FCP-NEXT: movb $12, %r10b
3679 ; AVX512BW-FCP-NEXT: kmovd %r10d, %k1
3680 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3681 ; AVX512BW-FCP-NEXT: movb $16, %r10b
3682 ; AVX512BW-FCP-NEXT: kmovd %r10d, %k2
3683 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3684 ; AVX512BW-FCP-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3685 ; AVX512BW-FCP-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3686 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3687 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3688 ; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %zmm10
3689 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm14
3690 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3691 ; AVX512BW-FCP-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3692 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm9
3693 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3694 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13
3695 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3696 ; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3697 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3698 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3699 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3700 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3701 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3702 ; AVX512BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3703 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm17
3704 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3705 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3706 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3707 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm13
3708 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3709 ; AVX512BW-FCP-NEXT: movb $48, %r9b
3710 ; AVX512BW-FCP-NEXT: kmovd %r9d, %k2
3711 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3712 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3713 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3714 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3715 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3716 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3717 ; AVX512BW-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3718 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm26
3719 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3720 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3721 ; AVX512BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3722 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm17
3723 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3724 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3725 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3726 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3727 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3728 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3729 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm28
3730 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3731 ; AVX512BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3732 ; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3733 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3734 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3735 ; AVX512BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3736 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3737 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3738 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3739 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3740 ; AVX512BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3741 ; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3742 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3743 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3744 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3745 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3746 ; AVX512BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3747 ; AVX512BW-FCP-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3748 ; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm15
3749 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %xmm21
3750 ; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3751 ; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3752 ; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3753 ; AVX512BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3754 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3755 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3756 ; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3757 ; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3758 ; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3759 ; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3760 ; AVX512BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3761 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3762 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3763 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3764 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3765 ; AVX512BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3766 ; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3767 ; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm7
3768 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %ymm20
3769 ; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3770 ; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3771 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3772 ; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3773 ; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3774 ; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3775 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3776 ; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3777 ; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3778 ; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3779 ; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3780 ; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3781 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rax)
3782 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 128(%rax)
3783 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 192(%rax)
3784 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 256(%rax)
3785 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, 320(%rax)
3786 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 448(%rax)
3787 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 512(%rax)
3788 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 576(%rax)
3789 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 640(%rax)
3790 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 704(%rax)
3791 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, 384(%rax)
3792 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, (%rax)
3793 ; AVX512BW-FCP-NEXT: vzeroupper
3794 ; AVX512BW-FCP-NEXT: retq
3796 ; AVX512DQ-BW-LABEL: store_i64_stride6_vf16:
3797 ; AVX512DQ-BW: # %bb.0:
3798 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3799 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm8
3800 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm12
3801 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm15
3802 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rsi), %zmm11
3803 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %zmm5
3804 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm2
3805 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm4
3806 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rcx), %zmm7
3807 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %zmm3
3808 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%r8), %zmm6
3809 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3810 ; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3811 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm1
3812 ; AVX512DQ-BW-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3813 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3814 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm10
3815 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3816 ; AVX512DQ-BW-NEXT: movb $12, %r10b
3817 ; AVX512DQ-BW-NEXT: kmovd %r10d, %k1
3818 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3819 ; AVX512DQ-BW-NEXT: movb $16, %r10b
3820 ; AVX512DQ-BW-NEXT: kmovd %r10d, %k2
3821 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3822 ; AVX512DQ-BW-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3823 ; AVX512DQ-BW-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3824 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3825 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3826 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %zmm10
3827 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%r9), %zmm14
3828 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3829 ; AVX512DQ-BW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3830 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm9
3831 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3832 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm13
3833 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3834 ; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3835 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3836 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3837 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3838 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3839 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3840 ; AVX512DQ-BW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3841 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm17
3842 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3843 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3844 ; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3845 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm13
3846 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3847 ; AVX512DQ-BW-NEXT: movb $48, %r9b
3848 ; AVX512DQ-BW-NEXT: kmovd %r9d, %k2
3849 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3850 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3851 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3852 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3853 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3854 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3855 ; AVX512DQ-BW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3856 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm26
3857 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3858 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3859 ; AVX512DQ-BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3860 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm17
3861 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
3862 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
3863 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
3864 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
3865 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
3866 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
3867 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm28
3868 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
3869 ; AVX512DQ-BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
3870 ; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
3871 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
3872 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
3873 ; AVX512DQ-BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
3874 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
3875 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
3876 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
3877 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
3878 ; AVX512DQ-BW-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
3879 ; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
3880 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
3881 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
3882 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
3883 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
3884 ; AVX512DQ-BW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
3885 ; AVX512DQ-BW-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
3886 ; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm15
3887 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %xmm21
3888 ; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
3889 ; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
3890 ; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
3891 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
3892 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
3893 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
3894 ; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
3895 ; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
3896 ; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
3897 ; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
3898 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
3899 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
3900 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
3901 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
3902 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
3903 ; AVX512DQ-BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3904 ; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
3905 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm7
3906 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %ymm20
3907 ; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
3908 ; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
3909 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
3910 ; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
3911 ; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
3912 ; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
3913 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
3914 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
3915 ; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
3916 ; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
3917 ; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
3918 ; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
3919 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%rax)
3920 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 128(%rax)
3921 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 192(%rax)
3922 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 256(%rax)
3923 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, 320(%rax)
3924 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 448(%rax)
3925 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, 512(%rax)
3926 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 576(%rax)
3927 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 640(%rax)
3928 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 704(%rax)
3929 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, 384(%rax)
3930 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, (%rax)
3931 ; AVX512DQ-BW-NEXT: vzeroupper
3932 ; AVX512DQ-BW-NEXT: retq
3934 ; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf16:
3935 ; AVX512DQ-BW-FCP: # %bb.0:
3936 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3937 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm8
3938 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm12
3939 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm15
3940 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm11
3941 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
3942 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
3943 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm4
3944 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
3945 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %zmm3
3946 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm6
3947 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
3948 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3949 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm1
3950 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm15, %zmm0, %zmm1
3951 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm9 = [0,0,4,12]
3952 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm10
3953 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
3954 ; AVX512DQ-BW-FCP-NEXT: movb $12, %r10b
3955 ; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k1
3956 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
3957 ; AVX512DQ-BW-FCP-NEXT: movb $16, %r10b
3958 ; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k2
3959 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
3960 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm11, %zmm8, %zmm0
3961 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm7, %zmm5, %zmm9
3962 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
3963 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k2}
3964 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %zmm10
3965 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm14
3966 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [7,15,7,15,7,15,7,15]
3967 ; AVX512DQ-BW-FCP-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3968 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm9
3969 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm19, %zmm9
3970 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm13
3971 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm19, %zmm13
3972 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm9[4,5,6,7]
3973 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [14,0,2,3,4,5,15,0]
3974 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm9
3975 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
3976 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm9
3977 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm22 = [6,14,6,14,6,14,6,14]
3978 ; AVX512DQ-BW-FCP-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3979 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm17
3980 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm22, %zmm17
3981 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [5,13,6,14,5,13,6,14]
3982 ; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3983 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm13
3984 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm16, %zmm13
3985 ; AVX512DQ-BW-FCP-NEXT: movb $48, %r9b
3986 ; AVX512DQ-BW-FCP-NEXT: kmovd %r9d, %k2
3987 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm13 {%k2}
3988 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm23 = [0,1,13,0,4,5,6,7]
3989 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm23, %zmm13
3990 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm24 = [0,1,2,13,4,5,6,7]
3991 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm24, %zmm13
3992 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [2,10,2,10,2,10,2,10]
3993 ; AVX512DQ-BW-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3994 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm26
3995 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm25, %zmm26
3996 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [1,9,2,10,1,9,2,10]
3997 ; AVX512DQ-BW-FCP-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3998 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm17
3999 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm18, %zmm17
4000 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm17 {%k2}
4001 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm26 = [0,1,9,0,4,5,6,7]
4002 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm26, %zmm17
4003 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm27 = [0,1,2,9,4,5,6,7]
4004 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm17
4005 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm28
4006 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm19, %zmm28
4007 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm19
4008 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm19[0,1,2,3],zmm28[4,5,6,7]
4009 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm19
4010 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm19
4011 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm22
4012 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm16
4013 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm16 {%k2}
4014 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm23, %zmm16
4015 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm24, %zmm16
4016 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm15, %zmm12, %zmm25
4017 ; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm2, %zmm18
4018 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm18 {%k2}
4019 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm26, %zmm18
4020 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm18
4021 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm20 = [0,8,1,9,0,8,1,9]
4022 ; AVX512DQ-BW-FCP-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3]
4023 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm15, %zmm20, %zmm12
4024 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm15
4025 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %xmm21
4026 ; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm15 = xmm15[0],mem[0]
4027 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
4028 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm15, %zmm0, %zmm12 {%k1}
4029 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm12
4030 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm15 = [0,1,2,3,4,8,6,7]
4031 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm15, %zmm12
4032 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm20, %zmm8
4033 ; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm21[0],mem[0]
4034 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
4035 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm8 {%k1}
4036 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm8
4037 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm8
4038 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm11 = [0,1,2,3,4,12,6,7]
4039 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm11, %zmm0
4040 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [3,11,3,11,3,11,3,11]
4041 ; AVX512DQ-BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4042 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm5
4043 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm7
4044 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %ymm20
4045 ; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm20 = ymm20[1],mem[1],ymm20[3],mem[3]
4046 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm20, %zmm5, %zmm5
4047 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm20 = [10,0,2,3,4,5,11,0]
4048 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm20, %zmm5
4049 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,10,2,3,4,5,6,11]
4050 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm5
4051 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm11, %zmm1
4052 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm15, %zmm2
4053 ; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm7[1],mem[1],ymm7[3],mem[3]
4054 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
4055 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
4056 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm6, %zmm2
4057 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%rax)
4058 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 128(%rax)
4059 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 192(%rax)
4060 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 256(%rax)
4061 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, 320(%rax)
4062 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 448(%rax)
4063 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 512(%rax)
4064 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 576(%rax)
4065 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 640(%rax)
4066 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 704(%rax)
4067 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, 384(%rax)
4068 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, (%rax)
4069 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
4070 ; AVX512DQ-BW-FCP-NEXT: retq
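; The IR below is the source of the assertions above: six contiguous
; <16 x i64> loads are concatenated into one <96 x i64> value and shuffled so
; that element 6*i+j of the result is element i of input j (a stride-6
; interleave), then stored as a single 768-byte block.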
%in.vec0 = load <16 x i64>, ptr %in.vecptr0, align 64
%in.vec1 = load <16 x i64>, ptr %in.vecptr1, align 64
%in.vec2 = load <16 x i64>, ptr %in.vecptr2, align 64
%in.vec3 = load <16 x i64>, ptr %in.vecptr3, align 64
%in.vec4 = load <16 x i64>, ptr %in.vecptr4, align 64
%in.vec5 = load <16 x i64>, ptr %in.vecptr5, align 64
%1 = shufflevector <16 x i64> %in.vec0, <16 x i64> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%2 = shufflevector <16 x i64> %in.vec2, <16 x i64> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%3 = shufflevector <16 x i64> %in.vec4, <16 x i64> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%4 = shufflevector <32 x i64> %1, <32 x i64> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
%5 = shufflevector <32 x i64> %3, <32 x i64> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%6 = shufflevector <64 x i64> %4, <64 x i64> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
%interleaved.vec = shufflevector <96 x i64> %6, <96 x i64> poison, <96 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95>
store <96 x i64> %interleaved.vec, ptr %out.vec, align 64
ret void
}
define void @store_i64_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf32:
; SSE: # %bb.0:
; SSE-NEXT: subq $1176, %rsp # imm = 0x498
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps 32(%rdi), %xmm9
; SSE-NEXT: movaps (%rsi), %xmm2
; SSE-NEXT: movaps 16(%rsi), %xmm1
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm10
; SSE-NEXT: movaps 16(%rdx), %xmm11
; SSE-NEXT: movaps 32(%rdx), %xmm12
; SSE-NEXT: movaps (%rcx), %xmm4
; SSE-NEXT: movaps 16(%rcx), %xmm3
; SSE-NEXT: movaps (%r8), %xmm13
; SSE-NEXT: movaps 16(%r8), %xmm14
; SSE-NEXT: movaps (%r9), %xmm6
; SSE-NEXT: movaps 16(%r9), %xmm5
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm2[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm4[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm6[0]
; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm5[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdi), %xmm2
; SSE-NEXT: movaps 112(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdx), %xmm2
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r8), %xmm2
; SSE-NEXT: movaps 112(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdi), %xmm2
; SSE-NEXT: movaps 128(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdx), %xmm2
; SSE-NEXT: movaps 128(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r8), %xmm2
; SSE-NEXT: movaps 128(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdi), %xmm2
; SSE-NEXT: movaps 144(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdx), %xmm2
; SSE-NEXT: movaps 144(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r8), %xmm2
; SSE-NEXT: movaps 144(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdi), %xmm2
; SSE-NEXT: movaps 160(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdx), %xmm2
; SSE-NEXT: movaps 160(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r8), %xmm2
; SSE-NEXT: movaps 160(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdi), %xmm2
; SSE-NEXT: movaps 176(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdx), %xmm2
; SSE-NEXT: movaps 176(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r8), %xmm2
; SSE-NEXT: movaps 176(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdi), %xmm2
; SSE-NEXT: movaps 192(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdx), %xmm2
; SSE-NEXT: movaps 192(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r8), %xmm2
; SSE-NEXT: movaps 192(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdi), %xmm15
; SSE-NEXT: movaps 208(%rsi), %xmm0
; SSE-NEXT: movaps %xmm15, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
; SSE-NEXT: movaps 208(%rdx), %xmm12
; SSE-NEXT: movaps 208(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps 208(%r8), %xmm11
; SSE-NEXT: movaps 208(%r9), %xmm0
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
; SSE-NEXT: movaps 224(%rdi), %xmm13
; SSE-NEXT: movaps 224(%rsi), %xmm0
; SSE-NEXT: movaps %xmm13, %xmm14
; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
; SSE-NEXT: movaps 224(%rdx), %xmm9
; SSE-NEXT: movaps 224(%rcx), %xmm0
; SSE-NEXT: movaps %xmm9, %xmm10
; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps 224(%r8), %xmm5
; SSE-NEXT: movaps 224(%r9), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 240(%rdi), %xmm6
; SSE-NEXT: movaps 240(%rsi), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 240(%rdx), %xmm1
; SSE-NEXT: movaps 240(%rcx), %xmm0
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps 240(%r8), %xmm0
; SSE-NEXT: movaps 240(%r9), %xmm3
; SSE-NEXT: movaps %xmm0, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm0, 1520(%rax)
; SSE-NEXT: movaps %xmm1, 1504(%rax)
; SSE-NEXT: movaps %xmm6, 1488(%rax)
; SSE-NEXT: movaps %xmm2, 1472(%rax)
; SSE-NEXT: movaps %xmm4, 1456(%rax)
; SSE-NEXT: movaps %xmm7, 1440(%rax)
; SSE-NEXT: movaps %xmm5, 1424(%rax)
; SSE-NEXT: movaps %xmm9, 1408(%rax)
; SSE-NEXT: movaps %xmm13, 1392(%rax)
; SSE-NEXT: movaps %xmm8, 1376(%rax)
; SSE-NEXT: movaps %xmm10, 1360(%rax)
; SSE-NEXT: movaps %xmm14, 1344(%rax)
; SSE-NEXT: movaps %xmm11, 1328(%rax)
; SSE-NEXT: movaps %xmm12, 1312(%rax)
; SSE-NEXT: movaps %xmm15, 1296(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1280(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1264(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1248(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1232(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1216(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1200(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1184(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1168(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1152(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1136(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1120(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1104(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1088(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1072(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1056(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1040(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1024(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1008(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 992(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 976(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 960(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 944(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 928(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 912(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 896(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 880(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 864(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 848(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 832(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 816(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 800(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 784(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 768(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 752(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 736(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 720(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 704(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 688(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 672(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 656(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 640(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 624(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 608(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 592(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 576(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 560(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 544(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 528(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 512(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 496(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 480(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 464(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 448(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 432(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 416(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 400(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 384(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 368(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 352(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 336(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 320(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 304(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 288(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 224(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 208(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 192(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 176(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 128(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 112(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $1176, %rsp # imm = 0x498
; SSE-NEXT: retq
;
; AVX-LABEL: store_i64_stride6_vf32:
; AVX: # %bb.0:
; AVX-NEXT: subq $1608, %rsp # imm = 0x648
; AVX-NEXT: vmovapd (%r8), %ymm0
; AVX-NEXT: vmovapd 32(%r8), %ymm1
; AVX-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX-NEXT: vmovaps (%rsi), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rsi), %xmm10
; AVX-NEXT: vmovaps 64(%rsi), %xmm11
; AVX-NEXT: vmovaps (%rdi), %xmm3
; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm3 = ymm0[0,1],ymm3[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps (%rcx), %xmm3
; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rcx), %xmm5
; AVX-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps (%rdx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm3
; AVX-NEXT: vbroadcastsd 8(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX-NEXT: vmovaps 32(%rdi), %xmm3
; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm10[1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm3 = ymm1[0,1],ymm3[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 32(%rdx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
; AVX-NEXT: vbroadcastsd 40(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 32(%r9), %ymm2, %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rdi), %xmm13
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm13[1],xmm11[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vmovapd 64(%r8), %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rcx), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 64(%rdx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vbroadcastsd 72(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 64(%r9), %ymm2, %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rsi), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 96(%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vmovapd 96(%r8), %ymm5
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rcx), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 96(%rdx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vbroadcastsd 104(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 96(%r9), %ymm2, %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 128(%rsi), %xmm14
; AVX-NEXT: vmovaps 128(%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm14[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX-NEXT: vmovapd 128(%r8), %ymm8
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm8[0,1],ymm2[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 128(%rcx), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 128(%rdx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vbroadcastsd 136(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 128(%r9), %ymm2, %ymm2
; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 160(%rsi), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 160(%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm4
; AVX-NEXT: vmovapd 160(%r8), %ymm2
; AVX-NEXT: vblendpd {{.*#+}} ymm4 = ymm2[0,1],ymm4[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3]
; AVX-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 160(%rcx), %xmm6
; AVX-NEXT: vmovaps %xmm6, (%rsp) # 16-byte Spill
; AVX-NEXT: vmovaps 160(%rdx), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
; AVX-NEXT: vbroadcastsd 168(%r8), %ymm6
; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 160(%r9), %ymm4, %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
; AVX-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 192(%rsi), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 192(%rdi), %xmm4
; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm6
; AVX-NEXT: vmovapd 192(%r8), %ymm4
; AVX-NEXT: vblendpd {{.*#+}} ymm6 = ymm4[0,1],ymm6[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3]
; AVX-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 192(%rcx), %xmm7
; AVX-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 192(%rdx), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX-NEXT: vbroadcastsd 200(%r8), %ymm7
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm6[0,1,2,3],ymm7[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 192(%r9), %ymm6, %ymm6
; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3],ymm7[4,5],ymm6[6,7]
; AVX-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 224(%rsi), %xmm7
; AVX-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 224(%rdi), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
; AVX-NEXT: vmovapd 224(%r8), %ymm9
; AVX-NEXT: vblendpd {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3]
; AVX-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 224(%rcx), %xmm7
; AVX-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 224(%rdx), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX-NEXT: vbroadcastsd 232(%r8), %ymm7
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm6[0,1,2,3],ymm7[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 224(%r9), %ymm6, %ymm6
; AVX-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3],ymm7[4,5],ymm6[6,7]
; AVX-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd (%rdi), %ymm6
; AVX-NEXT: vmovapd (%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm6[2,3]
; AVX-NEXT: vmovapd (%r9), %ymm0
; AVX-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm0[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[2],ymm7[3]
; AVX-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 32(%rdi), %ymm6
; AVX-NEXT: vmovapd 32(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm6[2,3]
; AVX-NEXT: vmovapd 32(%r9), %ymm12
; AVX-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm12[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 64(%rdi), %ymm1
; AVX-NEXT: vmovapd 64(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm3[2,3],ymm1[2,3]
; AVX-NEXT: vmovapd 64(%r9), %ymm6
; AVX-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm6[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 96(%rdi), %ymm1
; AVX-NEXT: vmovapd 96(%rsi), %ymm3
; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm5[2,3],ymm1[2,3]
; AVX-NEXT: vmovapd 96(%r9), %ymm5
; AVX-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[2,3],ymm3[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 128(%rdi), %ymm1
; AVX-NEXT: vmovapd 128(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm8[2,3],ymm1[2,3]
; AVX-NEXT: vmovapd 128(%r9), %ymm3
; AVX-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm3[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm7[0],ymm1[2],ymm7[3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 160(%rdi), %ymm1
; AVX-NEXT: vmovapd 160(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm1[2,3]
; AVX-NEXT: vmovapd 160(%r9), %ymm1
; AVX-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm1[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm7[0],ymm2[2],ymm7[3]
; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 192(%rdi), %ymm2
; AVX-NEXT: vmovapd 192(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX-NEXT: vmovapd 192(%r9), %ymm2
; AVX-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm2[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[3]
; AVX-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 224(%rdi), %ymm4
; AVX-NEXT: vmovapd 224(%rsi), %ymm7
; AVX-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm4[1],ymm7[1],ymm4[3],ymm7[3]
; AVX-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm4[2,3]
; AVX-NEXT: vmovapd 224(%r9), %ymm4
; AVX-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm4[2,3],ymm7[2,3]
; AVX-NEXT: vshufpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[3]
; AVX-NEXT: vmovupd %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 16(%rdi), %xmm7
; AVX-NEXT: vunpcklpd {{.*#+}} xmm7 = xmm7[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 16(%rcx), %ymm8
; AVX-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],ymm8[6,7]
; AVX-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 16(%rdx), %xmm7
; AVX-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],mem[1]
; AVX-NEXT: vbroadcastsd 24(%r8), %ymm8
; AVX-NEXT: vblendpd {{.*#+}} ymm7 = ymm7[0,1],ymm8[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm7[0,1,2],ymm0[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 48(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm7[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 48(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 56(%r8), %ymm7
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm12[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 80(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 80(%rcx), %ymm7
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm7[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 80(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 88(%r8), %ymm7
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm6[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 112(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 112(%rcx), %ymm6
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm6[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 112(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 120(%r8), %ymm6
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm6[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm5[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 144(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 144(%rcx), %ymm5
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm5[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 144(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 152(%r8), %ymm5
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm5[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 176(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 176(%rcx), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 176(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 184(%r8), %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 208(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 208(%rcx), %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 208(%rdx), %xmm0
; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX-NEXT: vbroadcastsd 216(%r8), %ymm15
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm15[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 240(%rdi), %xmm0
; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX-NEXT: vbroadcastsd 240(%rcx), %ymm15
; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm15[6,7]
; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd 240(%rdx), %xmm15
; AVX-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm15[1],mem[1]
; AVX-NEXT: vbroadcastsd 248(%r8), %ymm12
; AVX-NEXT: vblendpd {{.*#+}} ymm12 = ymm15[0,1],ymm12[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm12[0,1,2],ymm4[3]
; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm14[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm15 # 16-byte Folded Reload
; AVX-NEXT: # xmm15 = xmm0[0],mem[0]
; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm13[0],xmm11[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm12 # 16-byte Folded Reload
; AVX-NEXT: # xmm12 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovlhps {{.*#+}} xmm11 = xmm0[0],xmm10[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm10 # 16-byte Folded Reload
; AVX-NEXT: # xmm10 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
; AVX-NEXT: # xmm9 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm8 # 16-byte Folded Reload
; AVX-NEXT: # xmm8 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
; AVX-NEXT: # xmm7 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd (%rsp), %xmm0, %xmm6 # 16-byte Folded Reload
; AVX-NEXT: # xmm6 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
; AVX-NEXT: # xmm5 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
; AVX-NEXT: # xmm4 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
; AVX-NEXT: # xmm3 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
; AVX-NEXT: # xmm2 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
; AVX-NEXT: # xmm1 = xmm0[0],mem[0]
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovaps %xmm0, 16(%rax)
; AVX-NEXT: vmovaps %xmm1, (%rax)
; AVX-NEXT: vmovaps %xmm2, 1168(%rax)
; AVX-NEXT: vmovaps %xmm3, 1152(%rax)
; AVX-NEXT: vmovaps %xmm4, 1360(%rax)
; AVX-NEXT: vmovaps %xmm5, 1344(%rax)
; AVX-NEXT: vmovaps %xmm6, 976(%rax)
; AVX-NEXT: vmovaps %xmm7, 960(%rax)
; AVX-NEXT: vmovaps %xmm8, 592(%rax)
; AVX-NEXT: vmovaps %xmm9, 576(%rax)
; AVX-NEXT: vmovaps %xmm10, 208(%rax)
; AVX-NEXT: vmovaps %xmm11, 192(%rax)
; AVX-NEXT: vmovaps %xmm12, 400(%rax)
; AVX-NEXT: vmovaps %xmm13, 384(%rax)
; AVX-NEXT: vmovaps %xmm15, 784(%rax)
; AVX-NEXT: vmovaps %xmm14, 768(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1472(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1280(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1088(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 896(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 704(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 512(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 320(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 128(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1504(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1312(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1120(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 992(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 928(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 864(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 832(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 800(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 736(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 672(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 640(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 608(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 544(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 480(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 448(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 416(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 352(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 288(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 256(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 224(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 160(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 96(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 64(%rax)
; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX-NEXT: vmovaps %ymm0, 32(%rax)
; AVX-NEXT: addq $1608, %rsp # imm = 0x648
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf32:
; AVX2: # %bb.0:
; AVX2-NEXT: subq $1208, %rsp # imm = 0x4B8
; AVX2-NEXT: vmovaps (%r8), %ymm5
; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%r9), %xmm1
; AVX2-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vmovaps (%rsi), %xmm8
; AVX2-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%rdi), %xmm3
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm8[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vmovaps (%rcx), %xmm3
; AVX2-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 32(%rcx), %xmm5
; AVX2-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps (%rdx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 32(%rdx), %xmm8
; AVX2-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX2-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm5[1]
; AVX2-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rcx), %xmm14
; AVX2-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
; AVX2-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%rsi), %xmm12
; AVX2-NEXT: vmovaps 160(%rdi), %xmm13
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%rcx), %xmm10
; AVX2-NEXT: vmovaps 160(%rdx), %xmm11
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
; AVX2-NEXT: vbroadcastsd 168(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 192(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 192(%rsi), %xmm8
; AVX2-NEXT: vmovaps 192(%rdi), %xmm9
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 192(%rcx), %xmm6
; AVX2-NEXT: vmovaps 192(%rdx), %xmm7
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-NEXT: vbroadcastsd 200(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 224(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 224(%rsi), %xmm4
; AVX2-NEXT: vmovaps 224(%rdi), %xmm5
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 224(%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 224(%rcx), %xmm2
; AVX2-NEXT: vmovaps 224(%rdx), %xmm3
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 232(%r8), %ymm15
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm15[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm0
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
; AVX2-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%rdi), %ymm0
; AVX2-NEXT: vmovaps (%rsi), %ymm1
; AVX2-NEXT: vmovaps (%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 16(%rcx), %ymm4
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 24(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-NEXT: vmovaps 32(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 48(%rcx), %ymm4
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 48(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 56(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-NEXT: vmovaps 64(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 80(%rcx), %ymm4
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 80(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 88(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-NEXT: vmovaps 96(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 112(%rcx), %ymm4
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 112(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 120(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-NEXT: vmovaps 128(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 144(%rcx), %ymm4
; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 144(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 152(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovaps 160(%rdi), %ymm0
; AVX2-NEXT: vmovaps 160(%rsi), %ymm1
; AVX2-NEXT: vmovaps 160(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 176(%rcx), %ymm5
; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 176(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 184(%r8), %ymm1
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm0[2,3],ymm1[2,3]
; AVX2-NEXT: vmovaps 192(%rdi), %ymm1
; AVX2-NEXT: vmovaps 192(%rsi), %ymm0
; AVX2-NEXT: vmovaps 192(%rdx), %ymm2
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 208(%rcx), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm12[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 208(%r9), %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-NEXT: vbroadcastsd 216(%r8), %ymm2
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
; AVX2-NEXT: vmovaps 224(%rdi), %ymm2
; AVX2-NEXT: vmovaps 224(%rsi), %ymm12
; AVX2-NEXT: vmovaps 224(%rdx), %ymm0
; AVX2-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm2[0],ymm12[0],ymm2[2],ymm12[2]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],ymm0[2,3]
; AVX2-NEXT: vbroadcastsd 240(%rcx), %ymm15
; AVX2-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
; AVX2-NEXT: # ymm2 = mem[2,3],ymm2[2,3]
; AVX2-NEXT: vbroadcastsd 240(%r9), %ymm12
; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm12[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX2-NEXT: vbroadcastsd 248(%r8), %ymm12
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm12[2,3]
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
; AVX2-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-NEXT: vmovaps %ymm0, 1504(%rax)
; AVX2-NEXT: vmovaps %ymm2, 1472(%rax)
; AVX2-NEXT: vmovaps %ymm14, 1440(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1344(%rax)
; AVX2-NEXT: vmovaps %ymm1, 1312(%rax)
; AVX2-NEXT: vmovaps %ymm3, 1280(%rax)
; AVX2-NEXT: vmovaps %ymm4, 1248(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1152(%rax)
; AVX2-NEXT: vmovaps %ymm5, 1120(%rax)
; AVX2-NEXT: vmovaps %ymm7, 1088(%rax)
; AVX2-NEXT: vmovaps %ymm8, 1056(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 960(%rax)
; AVX2-NEXT: vmovaps %ymm6, 928(%rax)
; AVX2-NEXT: vmovaps %ymm10, 896(%rax)
; AVX2-NEXT: vmovaps %ymm11, 864(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 768(%rax)
; AVX2-NEXT: vmovaps %ymm9, 736(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 704(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 672(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-NEXT: vmovaps %ymm13, 544(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 512(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 480(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-NEXT: vmovaps %ymm15, 352(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 288(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-NEXT: vmovaps %ymm12, 160(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 96(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, (%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 992(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 832(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 800(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-NEXT: addq $1208, %rsp # imm = 0x4B8
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf32:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: subq $1208, %rsp # imm = 0x4B8
; AVX2-FP-NEXT: vmovaps (%r8), %ymm5
; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%r9), %xmm1
; AVX2-FP-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vmovaps (%rsi), %xmm8
; AVX2-FP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm3
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm8[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vmovaps (%rcx), %xmm3
; AVX2-FP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm5
; AVX2-FP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm8
; AVX2-FP-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX2-FP-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm5[1]
; AVX2-FP-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rcx), %xmm14
; AVX2-FP-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
; AVX2-FP-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rsi), %xmm12
; AVX2-FP-NEXT: vmovaps 160(%rdi), %xmm13
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rcx), %xmm10
; AVX2-FP-NEXT: vmovaps 160(%rdx), %xmm11
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
; AVX2-FP-NEXT: vbroadcastsd 168(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rsi), %xmm8
; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm9
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rcx), %xmm6
; AVX2-FP-NEXT: vmovaps 192(%rdx), %xmm7
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-FP-NEXT: vbroadcastsd 200(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rsi), %xmm4
; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm5
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 224(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps 224(%rdx), %xmm3
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 232(%r8), %ymm15
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm15[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm0
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps (%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps (%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 24(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 56(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 64(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 88(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 96(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 120(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 128(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 144(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 144(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 152(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 160(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 160(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 176(%rcx), %ymm5
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 176(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 184(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 192(%rdi), %ymm1
; AVX2-FP-NEXT: vmovaps 192(%rsi), %ymm0
; AVX2-FP-NEXT: vmovaps 192(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 208(%rcx), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm12[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 208(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 216(%r8), %ymm2
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vmovaps 224(%rdi), %ymm2
; AVX2-FP-NEXT: vmovaps 224(%rsi), %ymm12
; AVX2-FP-NEXT: vmovaps 224(%rdx), %ymm0
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm2[0],ymm12[0],ymm2[2],ymm12[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 240(%rcx), %ymm15
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm2 = mem[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 240(%r9), %ymm12
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm12[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 248(%r8), %ymm12
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm12[2,3]
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovaps %ymm0, 1504(%rax)
; AVX2-FP-NEXT: vmovaps %ymm2, 1472(%rax)
; AVX2-FP-NEXT: vmovaps %ymm14, 1440(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1344(%rax)
; AVX2-FP-NEXT: vmovaps %ymm1, 1312(%rax)
; AVX2-FP-NEXT: vmovaps %ymm3, 1280(%rax)
; AVX2-FP-NEXT: vmovaps %ymm4, 1248(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1152(%rax)
; AVX2-FP-NEXT: vmovaps %ymm5, 1120(%rax)
; AVX2-FP-NEXT: vmovaps %ymm7, 1088(%rax)
; AVX2-FP-NEXT: vmovaps %ymm8, 1056(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 960(%rax)
; AVX2-FP-NEXT: vmovaps %ymm6, 928(%rax)
; AVX2-FP-NEXT: vmovaps %ymm10, 896(%rax)
; AVX2-FP-NEXT: vmovaps %ymm11, 864(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 768(%rax)
; AVX2-FP-NEXT: vmovaps %ymm9, 736(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 704(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 672(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-FP-NEXT: vmovaps %ymm13, 544(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 512(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 480(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-FP-NEXT: vmovaps %ymm15, 352(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 288(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-FP-NEXT: vmovaps %ymm12, 160(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 96(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 992(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 832(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 800(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: addq $1208, %rsp # imm = 0x4B8
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i64_stride6_vf32:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: subq $1208, %rsp # imm = 0x4B8
; AVX2-FCP-NEXT: vmovaps (%r8), %ymm5
; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps (%r9), %xmm1
; AVX2-FCP-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm8
; AVX2-FCP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm3
; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm8[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm3
; AVX2-FCP-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm5
; AVX2-FCP-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm2
; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm8
; AVX2-FCP-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX2-FCP-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm5[1]
; AVX2-FCP-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-FCP-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FCP-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FCP-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
6011 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
6012 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6013 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %xmm2
6014 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6015 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %xmm0
6016 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6017 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
6018 ; AVX2-FCP-NEXT: vbroadcastsd 104(%r8), %ymm2
6019 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6020 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6021 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
6022 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6023 ; AVX2-FCP-NEXT: vmovaps 128(%r8), %ymm1
6024 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6025 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %xmm2
6026 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6027 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %xmm0
6028 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6029 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
6030 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
6031 ; AVX2-FCP-NEXT: vmovaps 128(%r9), %xmm1
6032 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
6033 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
6034 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6035 ; AVX2-FCP-NEXT: vmovaps 128(%rcx), %xmm14
6036 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %xmm0
6037 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6038 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
6039 ; AVX2-FCP-NEXT: vbroadcastsd 136(%r8), %ymm2
6040 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6041 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6042 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
6043 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6044 ; AVX2-FCP-NEXT: vmovaps 160(%r8), %ymm1
6045 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6046 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %xmm12
6047 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %xmm13
6048 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
6049 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
6050 ; AVX2-FCP-NEXT: vmovaps 160(%r9), %xmm1
6051 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
6052 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
6053 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6054 ; AVX2-FCP-NEXT: vmovaps 160(%rcx), %xmm10
6055 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %xmm11
6056 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
6057 ; AVX2-FCP-NEXT: vbroadcastsd 168(%r8), %ymm2
6058 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6059 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6060 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
6061 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6062 ; AVX2-FCP-NEXT: vmovaps 192(%r8), %ymm1
6063 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6064 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %xmm8
6065 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm9
6066 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
6067 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
6068 ; AVX2-FCP-NEXT: vmovaps 192(%r9), %xmm1
6069 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
6070 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
6071 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6072 ; AVX2-FCP-NEXT: vmovaps 192(%rcx), %xmm6
6073 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %xmm7
6074 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
6075 ; AVX2-FCP-NEXT: vbroadcastsd 200(%r8), %ymm2
6076 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6077 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6078 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
6079 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6080 ; AVX2-FCP-NEXT: vmovaps 224(%r8), %ymm1
6081 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6082 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %xmm4
6083 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm5
6084 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
6085 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
6086 ; AVX2-FCP-NEXT: vmovaps 224(%r9), %xmm0
6087 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
6088 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
6089 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6090 ; AVX2-FCP-NEXT: vmovaps 224(%rcx), %xmm2
6091 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %xmm3
6092 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
6093 ; AVX2-FCP-NEXT: vbroadcastsd 232(%r8), %ymm15
6094 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm15[4,5,6,7]
6095 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
6096 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
6097 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6098 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6099 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
6100 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6101 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
6102 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6103 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6104 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6105 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
6106 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6107 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
6108 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6109 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6110 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6111 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
6112 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6113 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
6114 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6115 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6116 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6117 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
6118 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6119 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
6120 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6121 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6122 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6123 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm0
6124 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6125 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
6126 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6127 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6128 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
6129 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
6130 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6131 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6132 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
6133 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
6134 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6135 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6136 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
6137 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
6138 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6139 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6140 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm0
6141 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm1
6142 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm2
6143 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6144 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
6145 ; AVX2-FCP-NEXT: vbroadcastsd 16(%rcx), %ymm4
6146 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
6147 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6148 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6149 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6150 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6151 ; AVX2-FCP-NEXT: vbroadcastsd 16(%r9), %ymm1
6152 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6153 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6154 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6155 ; AVX2-FCP-NEXT: vbroadcastsd 24(%r8), %ymm1
6156 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
6157 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6158 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm0
6159 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm1
6160 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm2
6161 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6162 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
6163 ; AVX2-FCP-NEXT: vbroadcastsd 48(%rcx), %ymm4
6164 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
6165 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6166 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6167 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6168 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6169 ; AVX2-FCP-NEXT: vbroadcastsd 48(%r9), %ymm1
6170 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6171 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6172 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6173 ; AVX2-FCP-NEXT: vbroadcastsd 56(%r8), %ymm1
6174 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
6175 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6176 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %ymm0
6177 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %ymm1
6178 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %ymm2
6179 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6180 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
6181 ; AVX2-FCP-NEXT: vbroadcastsd 80(%rcx), %ymm4
6182 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
6183 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6184 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6185 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6186 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6187 ; AVX2-FCP-NEXT: vbroadcastsd 80(%r9), %ymm1
6188 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6189 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6190 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6191 ; AVX2-FCP-NEXT: vbroadcastsd 88(%r8), %ymm1
6192 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
6193 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm0
6194 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %ymm1
6195 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %ymm2
6196 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6197 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
6198 ; AVX2-FCP-NEXT: vbroadcastsd 112(%rcx), %ymm4
6199 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
6200 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6201 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6202 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6203 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6204 ; AVX2-FCP-NEXT: vbroadcastsd 112(%r9), %ymm1
6205 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6206 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6207 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6208 ; AVX2-FCP-NEXT: vbroadcastsd 120(%r8), %ymm1
6209 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
6210 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm0
6211 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %ymm1
6212 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %ymm2
6213 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6214 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
6215 ; AVX2-FCP-NEXT: vbroadcastsd 144(%rcx), %ymm4
6216 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm3[0,1,2,3,4,5],ymm4[6,7]
6217 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6218 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6219 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6220 ; AVX2-FCP-NEXT: vbroadcastsd 144(%r9), %ymm1
6221 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6222 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6223 ; AVX2-FCP-NEXT: vbroadcastsd 152(%r8), %ymm1
6224 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm1[2,3]
6225 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm0
6226 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %ymm1
6227 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %ymm2
6228 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
6229 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
6230 ; AVX2-FCP-NEXT: vbroadcastsd 176(%rcx), %ymm5
6231 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm4[0,1,2,3,4,5],ymm5[6,7]
6232 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
6233 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6234 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6235 ; AVX2-FCP-NEXT: vbroadcastsd 176(%r9), %ymm1
6236 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6237 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
6238 ; AVX2-FCP-NEXT: vbroadcastsd 184(%r8), %ymm1
6239 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm0[2,3],ymm1[2,3]
6240 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %ymm1
6241 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %ymm0
6242 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %ymm2
6243 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
6244 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
6245 ; AVX2-FCP-NEXT: vbroadcastsd 208(%rcx), %ymm12
6246 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm12[6,7]
6247 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
6248 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6249 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
6250 ; AVX2-FCP-NEXT: vbroadcastsd 208(%r9), %ymm1
6251 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
6252 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],mem[1],ymm2[3],mem[3]
6253 ; AVX2-FCP-NEXT: vbroadcastsd 216(%r8), %ymm2
6254 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
6255 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %ymm2
6256 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %ymm12
6257 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %ymm0
6258 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm2[0],ymm12[0],ymm2[2],ymm12[2]
6259 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],ymm0[2,3]
6260 ; AVX2-FCP-NEXT: vbroadcastsd 240(%rcx), %ymm15
6261 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
6262 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
6263 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
6264 ; AVX2-FCP-NEXT: # ymm2 = mem[2,3],ymm2[2,3]
6265 ; AVX2-FCP-NEXT: vbroadcastsd 240(%r9), %ymm12
6266 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm12[2,3],ymm2[4,5,6,7]
6267 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
6268 ; AVX2-FCP-NEXT: vbroadcastsd 248(%r8), %ymm12
6269 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm12[2,3]
6270 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6271 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6272 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
6273 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6274 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
6275 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
6276 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
6277 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
6278 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
6279 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
6280 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
6281 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1504(%rax)
6282 ; AVX2-FCP-NEXT: vmovaps %ymm2, 1472(%rax)
6283 ; AVX2-FCP-NEXT: vmovaps %ymm14, 1440(%rax)
6284 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6285 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1344(%rax)
6286 ; AVX2-FCP-NEXT: vmovaps %ymm1, 1312(%rax)
6287 ; AVX2-FCP-NEXT: vmovaps %ymm3, 1280(%rax)
6288 ; AVX2-FCP-NEXT: vmovaps %ymm4, 1248(%rax)
6289 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6290 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1152(%rax)
6291 ; AVX2-FCP-NEXT: vmovaps %ymm5, 1120(%rax)
6292 ; AVX2-FCP-NEXT: vmovaps %ymm7, 1088(%rax)
6293 ; AVX2-FCP-NEXT: vmovaps %ymm8, 1056(%rax)
6294 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6295 ; AVX2-FCP-NEXT: vmovaps %ymm0, 960(%rax)
6296 ; AVX2-FCP-NEXT: vmovaps %ymm6, 928(%rax)
6297 ; AVX2-FCP-NEXT: vmovaps %ymm10, 896(%rax)
6298 ; AVX2-FCP-NEXT: vmovaps %ymm11, 864(%rax)
6299 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6300 ; AVX2-FCP-NEXT: vmovaps %ymm0, 768(%rax)
6301 ; AVX2-FCP-NEXT: vmovaps %ymm9, 736(%rax)
6302 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6303 ; AVX2-FCP-NEXT: vmovaps %ymm0, 704(%rax)
6304 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6305 ; AVX2-FCP-NEXT: vmovaps %ymm0, 672(%rax)
6306 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6307 ; AVX2-FCP-NEXT: vmovaps %ymm0, 576(%rax)
6308 ; AVX2-FCP-NEXT: vmovaps %ymm13, 544(%rax)
6309 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6310 ; AVX2-FCP-NEXT: vmovaps %ymm0, 512(%rax)
6311 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6312 ; AVX2-FCP-NEXT: vmovaps %ymm0, 480(%rax)
6313 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6314 ; AVX2-FCP-NEXT: vmovaps %ymm0, 384(%rax)
6315 ; AVX2-FCP-NEXT: vmovaps %ymm15, 352(%rax)
6316 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6317 ; AVX2-FCP-NEXT: vmovaps %ymm0, 320(%rax)
6318 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6319 ; AVX2-FCP-NEXT: vmovaps %ymm0, 288(%rax)
6320 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6321 ; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%rax)
6322 ; AVX2-FCP-NEXT: vmovaps %ymm12, 160(%rax)
6323 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6324 ; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
6325 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6326 ; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%rax)
6327 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6328 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
6329 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6330 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1408(%rax)
6331 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6332 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1376(%rax)
6333 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6334 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1216(%rax)
6335 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6336 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1184(%rax)
6337 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6338 ; AVX2-FCP-NEXT: vmovaps %ymm0, 1024(%rax)
6339 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6340 ; AVX2-FCP-NEXT: vmovaps %ymm0, 992(%rax)
6341 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6342 ; AVX2-FCP-NEXT: vmovaps %ymm0, 832(%rax)
6343 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6344 ; AVX2-FCP-NEXT: vmovaps %ymm0, 800(%rax)
6345 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6346 ; AVX2-FCP-NEXT: vmovaps %ymm0, 640(%rax)
6347 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6348 ; AVX2-FCP-NEXT: vmovaps %ymm0, 608(%rax)
6349 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6350 ; AVX2-FCP-NEXT: vmovaps %ymm0, 448(%rax)
6351 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6352 ; AVX2-FCP-NEXT: vmovaps %ymm0, 416(%rax)
6353 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6354 ; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%rax)
6355 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6356 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
6357 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6358 ; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
6359 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6360 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
6361 ; AVX2-FCP-NEXT: addq $1208, %rsp # imm = 0x4B8
6362 ; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf32:
; AVX512: # %bb.0:
; AVX512-NEXT: subq $648, %rsp # imm = 0x288
; AVX512-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm5
; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm19
; AVX512-NEXT: vmovdqa64 (%rsi), %zmm29
; AVX512-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512-NEXT: vmovdqa64 128(%rsi), %zmm23
; AVX512-NEXT: vmovdqa64 192(%rsi), %zmm20
; AVX512-NEXT: vmovdqa64 (%rdx), %zmm24
; AVX512-NEXT: vmovdqa64 64(%rdx), %zmm4
; AVX512-NEXT: vmovdqa64 128(%rdx), %zmm7
; AVX512-NEXT: vmovdqa64 192(%rdx), %zmm21
; AVX512-NEXT: vmovdqa64 (%rcx), %zmm18
; AVX512-NEXT: vmovdqa64 64(%rcx), %zmm13
; AVX512-NEXT: vmovdqa64 128(%rcx), %zmm12
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
; AVX512-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm19, %zmm0
; AVX512-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
; AVX512-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
; AVX512-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
; AVX512-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm24, %zmm0
; AVX512-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm26
; AVX512-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm22
; AVX512-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm30
; AVX512-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
; AVX512-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
; AVX512-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm28
; AVX512-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
; AVX512-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm17
; AVX512-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
; AVX512-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
; AVX512-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
; AVX512-NEXT: vmovdqa64 192(%rcx), %zmm2
; AVX512-NEXT: vmovdqa64 %zmm21, %zmm10
; AVX512-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
; AVX512-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
; AVX512-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512-NEXT: vmovdqa64 %zmm21, %zmm6
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm5
; AVX512-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm3
; AVX512-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
; AVX512-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm24, %zmm20
; AVX512-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
; AVX512-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm24, %zmm23
; AVX512-NEXT: vmovdqa64 %zmm24, %zmm9
; AVX512-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm18
; AVX512-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm31
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm24
; AVX512-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
; AVX512-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
; AVX512-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
; AVX512-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
; AVX512-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
; AVX512-NEXT: movb $12, %al
; AVX512-NEXT: kmovw %eax, %k1
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
; AVX512-NEXT: movb $48, %al
; AVX512-NEXT: kmovw %eax, %k2
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
; AVX512-NEXT: vmovdqa64 (%r8), %zmm0
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
; AVX512-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
; AVX512-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
; AVX512-NEXT: vmovdqa64 192(%r8), %zmm26
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
; AVX512-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
; AVX512-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
; AVX512-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
; AVX512-NEXT: vmovdqa64 64(%r9), %zmm17
; AVX512-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
; AVX512-NEXT: vmovdqa64 128(%r9), %zmm30
; AVX512-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
; AVX512-NEXT: vmovdqa64 192(%r9), %zmm14
; AVX512-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
; AVX512-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
; AVX512-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
; AVX512-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
; AVX512-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
; AVX512-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
; AVX512-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
; AVX512-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
; AVX512-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
; AVX512-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
; AVX512-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
; AVX512-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
; AVX512-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
; AVX512-NEXT: vmovdqa (%rdx), %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
; AVX512-NEXT: vmovdqa 64(%rdx), %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
; AVX512-NEXT: vmovdqa 128(%rdx), %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
; AVX512-NEXT: vmovdqa 192(%rdx), %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
; AVX512-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
; AVX512-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
; AVX512-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
; AVX512-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
; AVX512-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
; AVX512-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
; AVX512-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
; AVX512-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
; AVX512-NEXT: vmovdqa (%rdi), %ymm6
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX512-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
; AVX512-NEXT: movb $16, %al
; AVX512-NEXT: kmovw %eax, %k1
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
; AVX512-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
; AVX512-NEXT: vmovdqa 64(%rdi), %ymm0
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
; AVX512-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
; AVX512-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
; AVX512-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
; AVX512-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
; AVX512-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
; AVX512-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
; AVX512-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
; AVX512-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
; AVX512-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
; AVX512-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
; AVX512-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa64 %zmm15, 1472(%rax)
; AVX512-NEXT: vmovdqa64 %zmm29, 1408(%rax)
; AVX512-NEXT: vmovdqa64 %zmm24, 1344(%rax)
; AVX512-NEXT: vmovdqa64 %zmm2, 1280(%rax)
; AVX512-NEXT: vmovdqa64 %zmm25, 1216(%rax)
; AVX512-NEXT: vmovdqa64 %zmm3, 1088(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512-NEXT: vmovdqa64 %zmm28, 960(%rax)
; AVX512-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512-NEXT: vmovdqa64 %zmm13, 832(%rax)
; AVX512-NEXT: vmovdqa64 %zmm22, 704(%rax)
; AVX512-NEXT: vmovdqa64 %zmm31, 640(%rax)
; AVX512-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512-NEXT: vmovdqa64 %zmm0, 512(%rax)
; AVX512-NEXT: vmovdqa64 %zmm18, 448(%rax)
; AVX512-NEXT: vmovdqa64 %zmm12, 320(%rax)
; AVX512-NEXT: vmovdqa64 %zmm23, 256(%rax)
; AVX512-NEXT: vmovdqa64 %zmm27, 192(%rax)
; AVX512-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm20, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm16, 1152(%rax)
; AVX512-NEXT: vmovdqa64 %zmm11, 768(%rax)
; AVX512-NEXT: vmovdqa64 %zmm10, 384(%rax)
; AVX512-NEXT: vmovdqa64 %zmm5, (%rax)
; AVX512-NEXT: addq $648, %rsp # imm = 0x288
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i64_stride6_vf32:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: subq $648, %rsp # imm = 0x288
; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm5
; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm19
; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm29
; AVX512-FCP-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512-FCP-NEXT: vmovdqa64 128(%rsi), %zmm23
; AVX512-FCP-NEXT: vmovdqa64 192(%rsi), %zmm20
; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm24
; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %zmm4
; AVX512-FCP-NEXT: vmovdqa64 128(%rdx), %zmm7
; AVX512-FCP-NEXT: vmovdqa64 192(%rdx), %zmm21
; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm18
; AVX512-FCP-NEXT: vmovdqa64 64(%rcx), %zmm13
; AVX512-FCP-NEXT: vmovdqa64 128(%rcx), %zmm12
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
; AVX512-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
; AVX512-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
; AVX512-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
; AVX512-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm26
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm22
; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm30
; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
; AVX512-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm28
; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
; AVX512-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
; AVX512-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
; AVX512-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
; AVX512-FCP-NEXT: vmovdqa64 192(%rcx), %zmm2
; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
; AVX512-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
; AVX512-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm6
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm5
; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512-FCP-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
; AVX512-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm20
; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
; AVX512-FCP-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm23
; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm9
; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
; AVX512-FCP-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm18
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm31
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm24
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
; AVX512-FCP-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
; AVX512-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
; AVX512-FCP-NEXT: movb $12, %al
; AVX512-FCP-NEXT: kmovw %eax, %k1
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
; AVX512-FCP-NEXT: movb $48, %al
; AVX512-FCP-NEXT: kmovw %eax, %k2
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
; AVX512-FCP-NEXT: vmovdqa64 (%r8), %zmm0
; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
; AVX512-FCP-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512-FCP-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
; AVX512-FCP-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
; AVX512-FCP-NEXT: vmovdqa64 192(%r8), %zmm26
; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
; AVX512-FCP-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
; AVX512-FCP-NEXT: vmovdqa64 64(%r9), %zmm17
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
; AVX512-FCP-NEXT: vmovdqa64 128(%r9), %zmm30
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
; AVX512-FCP-NEXT: vmovdqa64 192(%r9), %zmm14
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm5
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
; AVX512-FCP-NEXT: vmovdqa 64(%rdx), %xmm5
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
; AVX512-FCP-NEXT: vmovdqa 128(%rdx), %xmm5
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
; AVX512-FCP-NEXT: vmovdqa 192(%rdx), %xmm5
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
; AVX512-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
; AVX512-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
; AVX512-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
; AVX512-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm6
; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
; AVX512-FCP-NEXT: movb $16, %al
; AVX512-FCP-NEXT: kmovw %eax, %k1
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
; AVX512-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
; AVX512-FCP-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 1472(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm29, 1408(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm24, 1344(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 1280(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm25, 1216(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 1088(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm28, 960(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 832(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm22, 704(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 640(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 512(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 448(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 320(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm23, 256(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm27, 192(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 64(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 1152(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 768(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 384(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%rax)
; AVX512-FCP-NEXT: addq $648, %rsp # imm = 0x288
; AVX512-FCP-NEXT: vzeroupper
6917 ; AVX512-FCP-NEXT: retq
6918 ;
6919 ; AVX512DQ-LABEL: store_i64_stride6_vf32:
6920 ; AVX512DQ: # %bb.0:
6921 ; AVX512DQ-NEXT: subq $648, %rsp # imm = 0x288
6922 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm11
6923 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm5
6924 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm1
6925 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm19
6926 ; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm29
6927 ; AVX512DQ-NEXT: vmovdqa64 64(%rsi), %zmm25
6928 ; AVX512DQ-NEXT: vmovdqa64 128(%rsi), %zmm23
6929 ; AVX512DQ-NEXT: vmovdqa64 192(%rsi), %zmm20
6930 ; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm24
6931 ; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %zmm4
6932 ; AVX512DQ-NEXT: vmovdqa64 128(%rdx), %zmm7
6933 ; AVX512DQ-NEXT: vmovdqa64 192(%rdx), %zmm21
6934 ; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm18
6935 ; AVX512DQ-NEXT: vmovdqa64 64(%rcx), %zmm13
6936 ; AVX512DQ-NEXT: vmovdqa64 128(%rcx), %zmm12
6937 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
6938 ; AVX512DQ-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
6939 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm0
6940 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
6941 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6942 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
6943 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
6944 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6945 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
6946 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
6947 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6948 ; AVX512DQ-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
6949 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
6950 ; AVX512DQ-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6951 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm0
6952 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
6953 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6954 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
6955 ; AVX512DQ-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6956 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm0
6957 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
6958 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6959 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
6960 ; AVX512DQ-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6961 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm0
6962 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
6963 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6964 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm0
6965 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
6966 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6967 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
6968 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
6969 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6970 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
6971 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
6972 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6973 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm26
6974 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
6975 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm22
6976 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
6977 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0
6978 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm30
6979 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
6980 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
6981 ; AVX512DQ-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6982 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
6983 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
6984 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
6985 ; AVX512DQ-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
6986 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6987 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm28
6988 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
6989 ; AVX512DQ-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
6990 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6991 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm17
6992 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
6993 ; AVX512DQ-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
6994 ; AVX512DQ-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
6995 ; AVX512DQ-NEXT: vmovdqa64 192(%rcx), %zmm2
6996 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm10
6997 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
6998 ; AVX512DQ-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
6999 ; AVX512DQ-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
7000 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
7001 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm6
7002 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
7003 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm5
7004 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
7005 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm3
7006 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
7007 ; AVX512DQ-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
7008 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
7009 ; AVX512DQ-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
7010 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm20
7011 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
7012 ; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
7013 ; AVX512DQ-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
7014 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm23
7015 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm9
7016 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
7017 ; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
7018 ; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7019 ; AVX512DQ-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
7020 ; AVX512DQ-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
7021 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm18
7022 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
7023 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm31
7024 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm24
7025 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
7026 ; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
7027 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm13
7028 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
7029 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm4
7030 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
7031 ; AVX512DQ-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
7032 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
7033 ; AVX512DQ-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
7034 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
7035 ; AVX512DQ-NEXT: movb $12, %al
7036 ; AVX512DQ-NEXT: kmovw %eax, %k1
7037 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7038 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
7039 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7040 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7041 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
7042 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7043 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7044 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
7045 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
7046 ; AVX512DQ-NEXT: movb $48, %al
7047 ; AVX512DQ-NEXT: kmovw %eax, %k2
7048 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7049 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
7050 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7051 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
7052 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7053 ; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
7054 ; AVX512DQ-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
7055 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7056 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7057 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7058 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7059 ; AVX512DQ-NEXT: vmovdqa64 (%r8), %zmm0
7060 ; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
7061 ; AVX512DQ-NEXT: vmovdqa64 64(%r8), %zmm1
7062 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
7063 ; AVX512DQ-NEXT: vmovdqa64 128(%r8), %zmm2
7064 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
7065 ; AVX512DQ-NEXT: vmovdqa64 192(%r8), %zmm26
7066 ; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
7067 ; AVX512DQ-NEXT: vmovdqa64 (%r9), %zmm8
7068 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
7069 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
7070 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
7071 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
7072 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
7073 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
7074 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
7075 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
7076 ; AVX512DQ-NEXT: vmovdqa64 64(%r9), %zmm17
7077 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
7078 ; AVX512DQ-NEXT: vmovdqa64 128(%r9), %zmm30
7079 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
7080 ; AVX512DQ-NEXT: vmovdqa64 192(%r9), %zmm14
7081 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
7082 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
7083 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
7084 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
7085 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
7086 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
7087 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
7088 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
7089 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
7090 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
7091 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
7092 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
7093 ; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
7094 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
7095 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
7096 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
7097 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
7098 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
7099 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
7100 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
7101 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
7102 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
7103 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
7104 ; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm5
7105 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7106 ; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7107 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
7108 ; AVX512DQ-NEXT: vmovdqa 64(%rdx), %xmm5
7109 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7110 ; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7111 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7112 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
7113 ; AVX512DQ-NEXT: vmovdqa 128(%rdx), %xmm5
7114 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7115 ; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7116 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7117 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
7118 ; AVX512DQ-NEXT: vmovdqa 192(%rdx), %xmm5
7119 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7120 ; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7121 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
7122 ; AVX512DQ-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
7123 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
7124 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
7125 ; AVX512DQ-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
7126 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
7127 ; AVX512DQ-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
7128 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
7129 ; AVX512DQ-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
7130 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
7131 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm6
7132 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
7133 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
7134 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
7135 ; AVX512DQ-NEXT: movb $16, %al
7136 ; AVX512DQ-NEXT: kmovw %eax, %k1
7137 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
7138 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
7139 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
7140 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %ymm0
7141 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
7142 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
7143 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
7144 ; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
7145 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %ymm1
7146 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
7147 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
7148 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
7149 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
7150 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
7151 ; AVX512DQ-NEXT: vmovdqa 192(%rdi), %ymm2
7152 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
7153 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
7154 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
7155 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
7156 ; AVX512DQ-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
7157 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
7158 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
7159 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
7160 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
7161 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
7162 ; AVX512DQ-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
7163 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
7164 ; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
7165 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
7166 ; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
7167 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
7168 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 1472(%rax)
7169 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, 1408(%rax)
7170 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, 1344(%rax)
7171 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 1280(%rax)
7172 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 1216(%rax)
7173 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 1088(%rax)
7174 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 1024(%rax)
7175 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, 960(%rax)
7176 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 896(%rax)
7177 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 832(%rax)
7178 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 704(%rax)
7179 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 640(%rax)
7180 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 576(%rax)
7181 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 512(%rax)
7182 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 448(%rax)
7183 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 320(%rax)
7184 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, 256(%rax)
7185 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 192(%rax)
7186 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rax)
7187 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 64(%rax)
7188 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 1152(%rax)
7189 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 768(%rax)
7190 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 384(%rax)
7191 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%rax)
7192 ; AVX512DQ-NEXT: addq $648, %rsp # imm = 0x288
7193 ; AVX512DQ-NEXT: vzeroupper
7194 ; AVX512DQ-NEXT: retq
7195 ;
7196 ; AVX512DQ-FCP-LABEL: store_i64_stride6_vf32:
7197 ; AVX512DQ-FCP: # %bb.0:
7198 ; AVX512DQ-FCP-NEXT: subq $648, %rsp # imm = 0x288
7199 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
7200 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm5
7201 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
7202 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm19
7203 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm29
7204 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rsi), %zmm25
7205 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rsi), %zmm23
7206 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rsi), %zmm20
7207 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm24
7208 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %zmm4
7209 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdx), %zmm7
7210 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdx), %zmm21
7211 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm18
7212 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rcx), %zmm13
7213 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rcx), %zmm12
7214 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
7215 ; AVX512DQ-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
7216 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm0
7217 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
7218 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7219 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
7220 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
7221 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7222 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7223 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
7224 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7225 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
7226 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
7227 ; AVX512DQ-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7228 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7229 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
7230 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7231 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
7232 ; AVX512DQ-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7233 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7234 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
7235 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7236 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
7237 ; AVX512DQ-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7238 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm0
7239 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
7240 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7241 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7242 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
7243 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7244 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7245 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
7246 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7247 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7248 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
7249 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7250 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm26
7251 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
7252 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm22
7253 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
7254 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
7255 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm30
7256 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
7257 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
7258 ; AVX512DQ-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7259 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
7260 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7261 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
7262 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7263 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7264 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm28
7265 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
7266 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
7267 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7268 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
7269 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
7270 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
7271 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
7272 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rcx), %zmm2
7273 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm10
7274 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
7275 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
7276 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
7277 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
7278 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm6
7279 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
7280 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm5
7281 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
7282 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm3
7283 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
7284 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
7285 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
7286 ; AVX512DQ-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
7287 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm20
7288 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
7289 ; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
7290 ; AVX512DQ-FCP-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
7291 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm23
7292 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm9
7293 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
7294 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
7295 ; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7296 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
7297 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
7298 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm18
7299 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
7300 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm31
7301 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm24
7302 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
7303 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
7304 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
7305 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
7306 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm4
7307 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
7308 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
7309 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
7310 ; AVX512DQ-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
7311 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
7312 ; AVX512DQ-FCP-NEXT: movb $12, %al
7313 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
7314 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7315 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
7316 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7317 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7318 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
7319 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7320 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7321 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
7322 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
7323 ; AVX512DQ-FCP-NEXT: movb $48, %al
7324 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
7325 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7326 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
7327 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7328 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
7329 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7330 ; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
7331 ; AVX512DQ-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
7332 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7333 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7334 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7335 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7336 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%r8), %zmm0
7337 ; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
7338 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r8), %zmm1
7339 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
7340 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%r8), %zmm2
7341 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
7342 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%r8), %zmm26
7343 ; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
7344 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%r9), %zmm8
7345 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
7346 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
7347 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
7348 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
7349 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
7350 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
7351 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
7352 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
7353 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r9), %zmm17
7354 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
7355 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%r9), %zmm30
7356 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
7357 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%r9), %zmm14
7358 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
7359 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
7360 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
7361 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
7362 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
7363 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
7364 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
7365 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
7366 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
7367 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
7368 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
7369 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
7370 ; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
7371 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
7372 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
7373 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
7374 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
7375 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
7376 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
7377 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
7378 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
7379 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
7380 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
7381 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm5
7382 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7383 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7384 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
7385 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdx), %xmm5
7386 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7387 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7388 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7389 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
7390 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdx), %xmm5
7391 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7392 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7393 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7394 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
7395 ; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdx), %xmm5
7396 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7397 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7398 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
7399 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
7400 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
7401 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
7402 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
7403 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
7404 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
7405 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
7406 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
7407 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
7408 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm6
7409 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
7410 ; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
7411 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
7412 ; AVX512DQ-FCP-NEXT: movb $16, %al
7413 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
7414 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
7415 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
7416 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
7417 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
7418 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
7419 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
7420 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
7421 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
7422 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm1
7423 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
7424 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
7425 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
7426 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
7427 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
7428 ; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
7429 ; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
7430 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
7431 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
7432 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
7433 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
7434 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
7435 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
7436 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
7437 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
7438 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
7439 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
7440 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
7441 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
7442 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
7443 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
7444 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
7445 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 1472(%rax)
7446 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, 1408(%rax)
7447 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, 1344(%rax)
7448 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 1280(%rax)
7449 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, 1216(%rax)
7450 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 1088(%rax)
7451 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 1024(%rax)
7452 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, 960(%rax)
7453 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 896(%rax)
7454 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 832(%rax)
7455 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, 704(%rax)
7456 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 640(%rax)
7457 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
7458 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 512(%rax)
7459 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 448(%rax)
7460 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 320(%rax)
7461 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, 256(%rax)
7462 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, 192(%rax)
7463 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
7464 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 64(%rax)
7465 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 1152(%rax)
7466 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 768(%rax)
7467 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 384(%rax)
7468 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%rax)
7469 ; AVX512DQ-FCP-NEXT: addq $648, %rsp # imm = 0x288
7470 ; AVX512DQ-FCP-NEXT: vzeroupper
7471 ; AVX512DQ-FCP-NEXT: retq
7472 ;
7473 ; AVX512BW-LABEL: store_i64_stride6_vf32:
7474 ; AVX512BW: # %bb.0:
7475 ; AVX512BW-NEXT: subq $648, %rsp # imm = 0x288
7476 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm11
7477 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm5
7478 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm1
7479 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm19
7480 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm29
7481 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm25
7482 ; AVX512BW-NEXT: vmovdqa64 128(%rsi), %zmm23
7483 ; AVX512BW-NEXT: vmovdqa64 192(%rsi), %zmm20
7484 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm24
7485 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm4
7486 ; AVX512BW-NEXT: vmovdqa64 128(%rdx), %zmm7
7487 ; AVX512BW-NEXT: vmovdqa64 192(%rdx), %zmm21
7488 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm18
7489 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm13
7490 ; AVX512BW-NEXT: vmovdqa64 128(%rcx), %zmm12
7491 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
7492 ; AVX512BW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
7493 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm0
7494 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
7495 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7496 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
7497 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
7498 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7499 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
7500 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
7501 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7502 ; AVX512BW-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
7503 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
7504 ; AVX512BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7505 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm0
7506 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
7507 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7508 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
7509 ; AVX512BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7510 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm0
7511 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
7512 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7513 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
7514 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7515 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm0
7516 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
7517 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7518 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm0
7519 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
7520 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7521 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
7522 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
7523 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7524 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
7525 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
7526 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7527 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm26
7528 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
7529 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm22
7530 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
7531 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
7532 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm30
7533 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
7534 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
7535 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7536 ; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
7537 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
7538 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
7539 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7540 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7541 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm28
7542 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
7543 ; AVX512BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
7544 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7545 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm17
7546 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
7547 ; AVX512BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
7548 ; AVX512BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
7549 ; AVX512BW-NEXT: vmovdqa64 192(%rcx), %zmm2
7550 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm10
7551 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
7552 ; AVX512BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
7553 ; AVX512BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
7554 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
7555 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm6
7556 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
7557 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm5
7558 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
7559 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm3
7560 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
7561 ; AVX512BW-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
7562 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
7563 ; AVX512BW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
7564 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm20
7565 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
7566 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
7567 ; AVX512BW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
7568 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm23
7569 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm9
7570 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
7571 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
7572 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7573 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
7574 ; AVX512BW-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
7575 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm18
7576 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
7577 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm31
7578 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm24
7579 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
7580 ; AVX512BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
7581 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13
7582 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
7583 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm4
7584 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
7585 ; AVX512BW-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
7586 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
7587 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
7588 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
7589 ; AVX512BW-NEXT: movb $12, %al
7590 ; AVX512BW-NEXT: kmovd %eax, %k1
7591 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7592 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
7593 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7594 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7595 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
7596 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7597 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7598 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
7599 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
7600 ; AVX512BW-NEXT: movb $48, %al
7601 ; AVX512BW-NEXT: kmovd %eax, %k2
7602 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7603 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
7604 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7605 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
7606 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7607 ; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
7608 ; AVX512BW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
7609 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7610 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7611 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7612 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7613 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm0
7614 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
7615 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm1
7616 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
7617 ; AVX512BW-NEXT: vmovdqa64 128(%r8), %zmm2
7618 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
7619 ; AVX512BW-NEXT: vmovdqa64 192(%r8), %zmm26
7620 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
7621 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm8
7622 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
7623 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
7624 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
7625 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
7626 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
7627 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
7628 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
7629 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
7630 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm17
7631 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
7632 ; AVX512BW-NEXT: vmovdqa64 128(%r9), %zmm30
7633 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
7634 ; AVX512BW-NEXT: vmovdqa64 192(%r9), %zmm14
7635 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
7636 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
7637 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
7638 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
7639 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
7640 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
7641 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
7642 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
7643 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
7644 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
7645 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
7646 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
7647 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
7648 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
7649 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
7650 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
7651 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
7652 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
7653 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
7654 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
7655 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
7656 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
7657 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
7658 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm5
7659 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7660 ; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7661 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
7662 ; AVX512BW-NEXT: vmovdqa 64(%rdx), %xmm5
7663 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7664 ; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7665 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7666 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
7667 ; AVX512BW-NEXT: vmovdqa 128(%rdx), %xmm5
7668 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7669 ; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7670 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7671 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
7672 ; AVX512BW-NEXT: vmovdqa 192(%rdx), %xmm5
7673 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
7674 ; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
7675 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
7676 ; AVX512BW-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
7677 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
7678 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
7679 ; AVX512BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
7680 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
7681 ; AVX512BW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
7682 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
7683 ; AVX512BW-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
7684 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
7685 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm6
7686 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
7687 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
7688 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
7689 ; AVX512BW-NEXT: movb $16, %al
7690 ; AVX512BW-NEXT: kmovd %eax, %k1
7691 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
7692 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
7693 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
7694 ; AVX512BW-NEXT: vmovdqa 64(%rdi), %ymm0
7695 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
7696 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
7697 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
7698 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
7699 ; AVX512BW-NEXT: vmovdqa 128(%rdi), %ymm1
7700 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
7701 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
7702 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
7703 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
7704 ; AVX512BW-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
7705 ; AVX512BW-NEXT: vmovdqa 192(%rdi), %ymm2
7706 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
7707 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
7708 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
7709 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
7710 ; AVX512BW-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
7711 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
7712 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
7713 ; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
7714 ; AVX512BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
7715 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
7716 ; AVX512BW-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
7717 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
7718 ; AVX512BW-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
7719 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
7720 ; AVX512BW-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
7721 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7722 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 1472(%rax)
7723 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 1408(%rax)
7724 ; AVX512BW-NEXT: vmovdqa64 %zmm24, 1344(%rax)
7725 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 1280(%rax)
7726 ; AVX512BW-NEXT: vmovdqa64 %zmm25, 1216(%rax)
7727 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 1088(%rax)
7728 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
7729 ; AVX512BW-NEXT: vmovdqa64 %zmm28, 960(%rax)
7730 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 896(%rax)
7731 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 832(%rax)
7732 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 704(%rax)
7733 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 640(%rax)
7734 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rax)
7735 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 512(%rax)
7736 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 448(%rax)
7737 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 320(%rax)
7738 ; AVX512BW-NEXT: vmovdqa64 %zmm23, 256(%rax)
7739 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 192(%rax)
7740 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
7741 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 64(%rax)
7742 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1152(%rax)
7743 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 768(%rax)
7744 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 384(%rax)
7745 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%rax)
7746 ; AVX512BW-NEXT: addq $648, %rsp # imm = 0x288
7747 ; AVX512BW-NEXT: vzeroupper
7748 ; AVX512BW-NEXT: retq
7749 ;
7750 ; AVX512BW-FCP-LABEL: store_i64_stride6_vf32:
7751 ; AVX512BW-FCP: # %bb.0:
7752 ; AVX512BW-FCP-NEXT: subq $648, %rsp # imm = 0x288
7753 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
7754 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm5
7755 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
7756 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm19
7757 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm29
7758 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm25
7759 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm23
7760 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm20
7761 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm24
7762 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm4
7763 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm7
7764 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm21
7765 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm18
7766 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm13
7767 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm12
7768 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
7769 ; AVX512BW-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
7770 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm0
7771 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
7772 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7773 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
7774 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
7775 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7776 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7777 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
7778 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7779 ; AVX512BW-FCP-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
7780 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
7781 ; AVX512BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7782 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7783 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
7784 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7785 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
7786 ; AVX512BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7787 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7788 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
7789 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7790 ; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
7791 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7792 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm0
7793 ; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
7794 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7795 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
7796 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
7797 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7798 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7799 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
7800 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7801 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
7802 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
7803 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7804 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm26
7805 ; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
7806 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm22
7807 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
7808 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
7809 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm30
7810 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
7811 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
7812 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7813 ; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
7814 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
7815 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
7816 ; AVX512BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
7817 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7818 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm28
7819 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
7820 ; AVX512BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
7821 ; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7822 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
7823 ; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
7824 ; AVX512BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
7825 ; AVX512BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
7826 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm2
7827 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm10
7828 ; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
7829 ; AVX512BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
7830 ; AVX512BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm6
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm5
; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
; AVX512BW-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm20
; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
; AVX512BW-FCP-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm23
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm18
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm31
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm24
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
; AVX512BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
; AVX512BW-FCP-NEXT: movb $12, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k1
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
; AVX512BW-FCP-NEXT: movb $48, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k2
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %zmm0
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa64 192(%r8), %zmm26
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
; AVX512BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm17
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
; AVX512BW-FCP-NEXT: vmovdqa64 128(%r9), %zmm30
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
; AVX512BW-FCP-NEXT: vmovdqa64 192(%r9), %zmm14
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm5
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 64(%rdx), %xmm5
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 128(%rdx), %xmm5
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 192(%rdx), %xmm5
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
; AVX512BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm6
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
; AVX512BW-FCP-NEXT: movb $16, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k1
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
; AVX512BW-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
; AVX512BW-FCP-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
; AVX512BW-FCP-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 1472(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, 1408(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, 1344(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 1280(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, 1216(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 1088(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, 960(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 832(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, 704(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 640(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 512(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 448(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 320(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, 256(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, 192(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 1152(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 768(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 384(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%rax)
; AVX512BW-FCP-NEXT: addq $648, %rsp # imm = 0x288
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i64_stride6_vf32:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: subq $648, %rsp # imm = 0x288
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm19
; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm29
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rsi), %zmm23
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rsi), %zmm20
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm24
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %zmm4
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdx), %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdx), %zmm21
; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm18
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rcx), %zmm13
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rcx), %zmm12
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm26
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm22
; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm30
; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm28
; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
; AVX512DQ-BW-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm17
; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
; AVX512DQ-BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
; AVX512DQ-BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rcx), %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
; AVX512DQ-BW-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
; AVX512DQ-BW-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm6
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm5
; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm20
; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm23
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm18
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm31
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm24
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
; AVX512DQ-BW-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
; AVX512DQ-BW-NEXT: movb $12, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
; AVX512DQ-BW-NEXT: movb $48, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k2
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %zmm0
; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
; AVX512DQ-BW-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
; AVX512DQ-BW-NEXT: vmovdqa64 192(%r8), %zmm26
; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
; AVX512DQ-BW-NEXT: vmovdqa64 64(%r9), %zmm17
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
; AVX512DQ-BW-NEXT: vmovdqa64 128(%r9), %zmm30
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
; AVX512DQ-BW-NEXT: vmovdqa64 192(%r9), %zmm14
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm5
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 64(%rdx), %xmm5
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 128(%rdx), %xmm5
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 192(%rdx), %xmm5
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
; AVX512DQ-BW-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm6
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
; AVX512DQ-BW-NEXT: movb $16, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa 64(%rdi), %ymm0
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQ-BW-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
; AVX512DQ-BW-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
; AVX512DQ-BW-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 1472(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, 1408(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, 1344(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 1280(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, 1216(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 1088(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, 960(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 832(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, 704(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 640(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 512(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 448(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 320(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, 256(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, 192(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 64(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 1152(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 768(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 384(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%rax)
; AVX512DQ-BW-NEXT: addq $648, %rsp # imm = 0x288
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf32:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: subq $648, %rsp # imm = 0x288
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm19
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm29
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm23
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm20
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm21
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm12
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm27, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm27, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm27, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm29, %zmm11, %zmm27
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm14, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm16 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm16, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm14, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm26
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm16, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm22
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm16, %zmm22
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm30
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm14, %zmm30
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm15, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm25, %zmm1, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm28
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm16, %zmm28
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm16, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm16, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm20, %zmm19, %zmm16
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm20, %zmm1, %zmm19
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm1, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm18, %zmm24, %zmm1
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm20
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm25, %zmm20
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm23
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm29, %zmm23
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm18, %zmm0, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm18
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm25, %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm31
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm24
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm29, %zmm31
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm0, %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm25, %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm29, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm12, %zmm0, %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm25
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm2, %zmm21, %zmm29
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm21
; AVX512DQ-BW-FCP-NEXT: movb $12, %al
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm27 {%k1}
; AVX512DQ-BW-FCP-NEXT: movb $48, %al
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %zmm0
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm26[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, %zmm13 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%r8), %zmm26
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm28[0,1,2,3],zmm17[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm25 {%k2}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm20
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm18
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm25
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm20
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%r9), %zmm30
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%r9), %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm29 {%k2}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm23
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm31
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm29
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm23
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm31
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm29
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm15 = zmm16[0,1,2,3],zmm10[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm5, %zmm12
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm5, %zmm22
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm5, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm5, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm5 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm5, %zmm22
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm5, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm5, %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm5
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 64(%rdx), %xmm5
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdx), %xmm5
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm16 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 192(%rdx), %xmm5
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm19 {%k1}
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm11, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm5
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm10, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm6, %zmm10
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm11
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm6, %zmm11
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm19, %zmm16
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm6, %zmm16
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm6
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm19 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm19, %zmm6
; AVX512DQ-BW-FCP-NEXT: movb $16, %al
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm19, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm0, %zmm24, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm7, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm28 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm19, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm21, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, %zmm24 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm26, %zmm19, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm19 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm19, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm21 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm21, %zmm27
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm19, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm17, %zmm21, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm19, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm21, %zmm28
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm19, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm21, %zmm24
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 1472(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, 1408(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, 1344(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 1280(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, 1216(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 1088(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, 960(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 832(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, 704(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 640(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 512(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 448(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 320(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, 256(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, 192(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 64(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 1152(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 768(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 384(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%rax)
; AVX512DQ-BW-FCP-NEXT: addq $648, %rsp # imm = 0x288
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
  %in.vec0 = load <32 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <32 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <32 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <32 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <32 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <32 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <32 x i64> %in.vec0, <32 x i64> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %2 = shufflevector <32 x i64> %in.vec2, <32 x i64> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %3 = shufflevector <32 x i64> %in.vec4, <32 x i64> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %4 = shufflevector <64 x i64> %1, <64 x i64> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %5 = shufflevector <64 x i64> %3, <64 x i64> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <128 x i64> %4, <128 x i64> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
  %interleaved.vec = shufflevector <192 x i64> %6, <192 x i64> poison, <192 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191>
  store <192 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
define void @store_i64_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf64:
; SSE: # %bb.0:
; SSE-NEXT: subq $2712, %rsp # imm = 0xA98
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps 32(%rdi), %xmm9
; SSE-NEXT: movaps (%rsi), %xmm2
; SSE-NEXT: movaps 16(%rsi), %xmm1
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm10
; SSE-NEXT: movaps 16(%rdx), %xmm11
; SSE-NEXT: movaps 32(%rdx), %xmm12
; SSE-NEXT: movaps (%rcx), %xmm4
; SSE-NEXT: movaps 16(%rcx), %xmm3
; SSE-NEXT: movaps 16(%r8), %xmm14
; SSE-NEXT: movaps (%r8), %xmm13
; SSE-NEXT: movaps 16(%r9), %xmm5
; SSE-NEXT: movaps (%r9), %xmm6
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm2[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm4[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm6[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm5[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdi), %xmm2
; SSE-NEXT: movaps 112(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdx), %xmm2
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r8), %xmm2
; SSE-NEXT: movaps 112(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdi), %xmm2
; SSE-NEXT: movaps 128(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdx), %xmm2
; SSE-NEXT: movaps 128(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r8), %xmm2
; SSE-NEXT: movaps 128(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdi), %xmm2
; SSE-NEXT: movaps 144(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8794 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8795 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8796 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8797 ; SSE-NEXT: movaps 144(%rdx), %xmm2
8798 ; SSE-NEXT: movaps 144(%rcx), %xmm0
8799 ; SSE-NEXT: movaps %xmm2, %xmm1
8800 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8801 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8802 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8803 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8804 ; SSE-NEXT: movaps 144(%r8), %xmm2
8805 ; SSE-NEXT: movaps 144(%r9), %xmm0
8806 ; SSE-NEXT: movaps %xmm2, %xmm1
8807 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8808 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8809 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8810 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8811 ; SSE-NEXT: movaps 160(%rdi), %xmm2
8812 ; SSE-NEXT: movaps 160(%rsi), %xmm0
8813 ; SSE-NEXT: movaps %xmm2, %xmm1
8814 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8815 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8816 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8817 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8818 ; SSE-NEXT: movaps 160(%rdx), %xmm2
8819 ; SSE-NEXT: movaps 160(%rcx), %xmm0
8820 ; SSE-NEXT: movaps %xmm2, %xmm1
8821 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8822 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8823 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8824 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8825 ; SSE-NEXT: movaps 160(%r8), %xmm2
8826 ; SSE-NEXT: movaps 160(%r9), %xmm0
8827 ; SSE-NEXT: movaps %xmm2, %xmm1
8828 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8829 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8830 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8831 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8832 ; SSE-NEXT: movaps 176(%rdi), %xmm2
8833 ; SSE-NEXT: movaps 176(%rsi), %xmm0
8834 ; SSE-NEXT: movaps %xmm2, %xmm1
8835 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8836 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8837 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8838 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8839 ; SSE-NEXT: movaps 176(%rdx), %xmm2
8840 ; SSE-NEXT: movaps 176(%rcx), %xmm0
8841 ; SSE-NEXT: movaps %xmm2, %xmm1
8842 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8843 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8844 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8845 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8846 ; SSE-NEXT: movaps 176(%r8), %xmm2
8847 ; SSE-NEXT: movaps 176(%r9), %xmm0
8848 ; SSE-NEXT: movaps %xmm2, %xmm1
8849 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8850 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8851 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8852 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8853 ; SSE-NEXT: movaps 192(%rdi), %xmm2
8854 ; SSE-NEXT: movaps 192(%rsi), %xmm0
8855 ; SSE-NEXT: movaps %xmm2, %xmm1
8856 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8857 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8858 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8859 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8860 ; SSE-NEXT: movaps 192(%rdx), %xmm2
8861 ; SSE-NEXT: movaps 192(%rcx), %xmm0
8862 ; SSE-NEXT: movaps %xmm2, %xmm1
8863 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8864 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8865 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8866 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8867 ; SSE-NEXT: movaps 192(%r8), %xmm2
8868 ; SSE-NEXT: movaps 192(%r9), %xmm0
8869 ; SSE-NEXT: movaps %xmm2, %xmm1
8870 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8871 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8872 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8873 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8874 ; SSE-NEXT: movaps 208(%rdi), %xmm2
8875 ; SSE-NEXT: movaps 208(%rsi), %xmm0
8876 ; SSE-NEXT: movaps %xmm2, %xmm1
8877 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8878 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8879 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8880 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8881 ; SSE-NEXT: movaps 208(%rdx), %xmm2
8882 ; SSE-NEXT: movaps 208(%rcx), %xmm0
8883 ; SSE-NEXT: movaps %xmm2, %xmm1
8884 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8885 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8886 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8887 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8888 ; SSE-NEXT: movaps 208(%r8), %xmm2
8889 ; SSE-NEXT: movaps 208(%r9), %xmm0
8890 ; SSE-NEXT: movaps %xmm2, %xmm1
8891 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8892 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8893 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8894 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8895 ; SSE-NEXT: movaps 224(%rdi), %xmm2
8896 ; SSE-NEXT: movaps 224(%rsi), %xmm0
8897 ; SSE-NEXT: movaps %xmm2, %xmm1
8898 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8899 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8900 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8901 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8902 ; SSE-NEXT: movaps 224(%rdx), %xmm2
8903 ; SSE-NEXT: movaps 224(%rcx), %xmm0
8904 ; SSE-NEXT: movaps %xmm2, %xmm1
8905 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8906 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8907 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8908 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8909 ; SSE-NEXT: movaps 224(%r8), %xmm2
8910 ; SSE-NEXT: movaps 224(%r9), %xmm0
8911 ; SSE-NEXT: movaps %xmm2, %xmm1
8912 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8913 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8914 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8915 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8916 ; SSE-NEXT: movaps 240(%rdi), %xmm2
8917 ; SSE-NEXT: movaps 240(%rsi), %xmm0
8918 ; SSE-NEXT: movaps %xmm2, %xmm1
8919 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8920 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8921 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8922 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8923 ; SSE-NEXT: movaps 240(%rdx), %xmm2
8924 ; SSE-NEXT: movaps 240(%rcx), %xmm0
8925 ; SSE-NEXT: movaps %xmm2, %xmm1
8926 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8927 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8928 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8929 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8930 ; SSE-NEXT: movaps 240(%r8), %xmm2
8931 ; SSE-NEXT: movaps 240(%r9), %xmm0
8932 ; SSE-NEXT: movaps %xmm2, %xmm1
8933 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8934 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8935 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8936 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8937 ; SSE-NEXT: movaps 256(%rdi), %xmm2
8938 ; SSE-NEXT: movaps 256(%rsi), %xmm0
8939 ; SSE-NEXT: movaps %xmm2, %xmm1
8940 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8941 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8942 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8943 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8944 ; SSE-NEXT: movaps 256(%rdx), %xmm2
8945 ; SSE-NEXT: movaps 256(%rcx), %xmm0
8946 ; SSE-NEXT: movaps %xmm2, %xmm1
8947 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8948 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8949 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8950 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8951 ; SSE-NEXT: movaps 256(%r8), %xmm2
8952 ; SSE-NEXT: movaps 256(%r9), %xmm0
8953 ; SSE-NEXT: movaps %xmm2, %xmm1
8954 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8955 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8956 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8957 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8958 ; SSE-NEXT: movaps 272(%rdi), %xmm2
8959 ; SSE-NEXT: movaps 272(%rsi), %xmm0
8960 ; SSE-NEXT: movaps %xmm2, %xmm1
8961 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8962 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8963 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8964 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8965 ; SSE-NEXT: movaps 272(%rdx), %xmm2
8966 ; SSE-NEXT: movaps 272(%rcx), %xmm0
8967 ; SSE-NEXT: movaps %xmm2, %xmm1
8968 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8969 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8970 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8971 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8972 ; SSE-NEXT: movaps 272(%r8), %xmm2
8973 ; SSE-NEXT: movaps 272(%r9), %xmm0
8974 ; SSE-NEXT: movaps %xmm2, %xmm1
8975 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8976 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8977 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8978 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8979 ; SSE-NEXT: movaps 288(%rdi), %xmm2
8980 ; SSE-NEXT: movaps 288(%rsi), %xmm0
8981 ; SSE-NEXT: movaps %xmm2, %xmm1
8982 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8983 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8984 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8985 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8986 ; SSE-NEXT: movaps 288(%rdx), %xmm2
8987 ; SSE-NEXT: movaps 288(%rcx), %xmm0
8988 ; SSE-NEXT: movaps %xmm2, %xmm1
8989 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8990 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8991 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8992 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8993 ; SSE-NEXT: movaps 288(%r8), %xmm2
8994 ; SSE-NEXT: movaps 288(%r9), %xmm0
8995 ; SSE-NEXT: movaps %xmm2, %xmm1
8996 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
8997 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8998 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
8999 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9000 ; SSE-NEXT: movaps 304(%rdi), %xmm2
9001 ; SSE-NEXT: movaps 304(%rsi), %xmm0
9002 ; SSE-NEXT: movaps %xmm2, %xmm1
9003 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9004 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9005 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9006 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9007 ; SSE-NEXT: movaps 304(%rdx), %xmm2
9008 ; SSE-NEXT: movaps 304(%rcx), %xmm0
9009 ; SSE-NEXT: movaps %xmm2, %xmm1
9010 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9011 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9012 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9013 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9014 ; SSE-NEXT: movaps 304(%r8), %xmm2
9015 ; SSE-NEXT: movaps 304(%r9), %xmm0
9016 ; SSE-NEXT: movaps %xmm2, %xmm1
9017 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9018 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9019 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9020 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9021 ; SSE-NEXT: movaps 320(%rdi), %xmm2
9022 ; SSE-NEXT: movaps 320(%rsi), %xmm0
9023 ; SSE-NEXT: movaps %xmm2, %xmm1
9024 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9025 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9026 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9027 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9028 ; SSE-NEXT: movaps 320(%rdx), %xmm2
9029 ; SSE-NEXT: movaps 320(%rcx), %xmm0
9030 ; SSE-NEXT: movaps %xmm2, %xmm1
9031 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9032 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9033 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9034 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9035 ; SSE-NEXT: movaps 320(%r8), %xmm2
9036 ; SSE-NEXT: movaps 320(%r9), %xmm0
9037 ; SSE-NEXT: movaps %xmm2, %xmm1
9038 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9039 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9040 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9041 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9042 ; SSE-NEXT: movaps 336(%rdi), %xmm2
9043 ; SSE-NEXT: movaps 336(%rsi), %xmm0
9044 ; SSE-NEXT: movaps %xmm2, %xmm1
9045 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9046 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9047 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9048 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9049 ; SSE-NEXT: movaps 336(%rdx), %xmm2
9050 ; SSE-NEXT: movaps 336(%rcx), %xmm0
9051 ; SSE-NEXT: movaps %xmm2, %xmm1
9052 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9053 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9054 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9055 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9056 ; SSE-NEXT: movaps 336(%r8), %xmm2
9057 ; SSE-NEXT: movaps 336(%r9), %xmm0
9058 ; SSE-NEXT: movaps %xmm2, %xmm1
9059 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9060 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9061 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9062 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9063 ; SSE-NEXT: movaps 352(%rdi), %xmm2
9064 ; SSE-NEXT: movaps 352(%rsi), %xmm0
9065 ; SSE-NEXT: movaps %xmm2, %xmm1
9066 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9067 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9068 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9069 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9070 ; SSE-NEXT: movaps 352(%rdx), %xmm2
9071 ; SSE-NEXT: movaps 352(%rcx), %xmm0
9072 ; SSE-NEXT: movaps %xmm2, %xmm1
9073 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9074 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9075 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9076 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9077 ; SSE-NEXT: movaps 352(%r8), %xmm2
9078 ; SSE-NEXT: movaps 352(%r9), %xmm0
9079 ; SSE-NEXT: movaps %xmm2, %xmm1
9080 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9081 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9082 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9083 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9084 ; SSE-NEXT: movaps 368(%rdi), %xmm2
9085 ; SSE-NEXT: movaps 368(%rsi), %xmm0
9086 ; SSE-NEXT: movaps %xmm2, %xmm1
9087 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9088 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9089 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9090 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9091 ; SSE-NEXT: movaps 368(%rdx), %xmm2
9092 ; SSE-NEXT: movaps 368(%rcx), %xmm0
9093 ; SSE-NEXT: movaps %xmm2, %xmm1
9094 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9095 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9096 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9097 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9098 ; SSE-NEXT: movaps 368(%r8), %xmm2
9099 ; SSE-NEXT: movaps 368(%r9), %xmm0
9100 ; SSE-NEXT: movaps %xmm2, %xmm1
9101 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9102 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9103 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9104 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9105 ; SSE-NEXT: movaps 384(%rdi), %xmm2
9106 ; SSE-NEXT: movaps 384(%rsi), %xmm0
9107 ; SSE-NEXT: movaps %xmm2, %xmm1
9108 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9109 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9110 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9111 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9112 ; SSE-NEXT: movaps 384(%rdx), %xmm2
9113 ; SSE-NEXT: movaps 384(%rcx), %xmm0
9114 ; SSE-NEXT: movaps %xmm2, %xmm1
9115 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9116 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9117 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9118 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9119 ; SSE-NEXT: movaps 384(%r8), %xmm2
9120 ; SSE-NEXT: movaps 384(%r9), %xmm0
9121 ; SSE-NEXT: movaps %xmm2, %xmm1
9122 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9123 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9124 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9125 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9126 ; SSE-NEXT: movaps 400(%rdi), %xmm2
9127 ; SSE-NEXT: movaps 400(%rsi), %xmm0
9128 ; SSE-NEXT: movaps %xmm2, %xmm1
9129 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9130 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9131 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9132 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9133 ; SSE-NEXT: movaps 400(%rdx), %xmm2
9134 ; SSE-NEXT: movaps 400(%rcx), %xmm0
9135 ; SSE-NEXT: movaps %xmm2, %xmm1
9136 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9137 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9138 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9139 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9140 ; SSE-NEXT: movaps 400(%r8), %xmm2
9141 ; SSE-NEXT: movaps 400(%r9), %xmm0
9142 ; SSE-NEXT: movaps %xmm2, %xmm1
9143 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9144 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9145 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9146 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9147 ; SSE-NEXT: movaps 416(%rdi), %xmm2
9148 ; SSE-NEXT: movaps 416(%rsi), %xmm0
9149 ; SSE-NEXT: movaps %xmm2, %xmm1
9150 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9151 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9152 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9153 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9154 ; SSE-NEXT: movaps 416(%rdx), %xmm2
9155 ; SSE-NEXT: movaps 416(%rcx), %xmm0
9156 ; SSE-NEXT: movaps %xmm2, %xmm1
9157 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9158 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9159 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9160 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9161 ; SSE-NEXT: movaps 416(%r8), %xmm2
9162 ; SSE-NEXT: movaps 416(%r9), %xmm0
9163 ; SSE-NEXT: movaps %xmm2, %xmm1
9164 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9165 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9166 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9167 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9168 ; SSE-NEXT: movaps 432(%rdi), %xmm2
9169 ; SSE-NEXT: movaps 432(%rsi), %xmm0
9170 ; SSE-NEXT: movaps %xmm2, %xmm1
9171 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9172 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9173 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9174 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9175 ; SSE-NEXT: movaps 432(%rdx), %xmm2
9176 ; SSE-NEXT: movaps 432(%rcx), %xmm0
9177 ; SSE-NEXT: movaps %xmm2, %xmm1
9178 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9179 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9180 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9181 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9182 ; SSE-NEXT: movaps 432(%r8), %xmm2
9183 ; SSE-NEXT: movaps 432(%r9), %xmm0
9184 ; SSE-NEXT: movaps %xmm2, %xmm1
9185 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9186 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9187 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9188 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9189 ; SSE-NEXT: movaps 448(%rdi), %xmm2
9190 ; SSE-NEXT: movaps 448(%rsi), %xmm0
9191 ; SSE-NEXT: movaps %xmm2, %xmm1
9192 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9193 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
9194 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9195 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9196 ; SSE-NEXT: movaps 448(%rdx), %xmm2
9197 ; SSE-NEXT: movaps 448(%rcx), %xmm0
9198 ; SSE-NEXT: movaps %xmm2, %xmm1
9199 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9200 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9201 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9202 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9203 ; SSE-NEXT: movaps 448(%r8), %xmm2
9204 ; SSE-NEXT: movaps 448(%r9), %xmm0
9205 ; SSE-NEXT: movaps %xmm2, %xmm1
9206 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9207 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9208 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9209 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9210 ; SSE-NEXT: movaps 464(%rdi), %xmm15
9211 ; SSE-NEXT: movaps 464(%rsi), %xmm0
9212 ; SSE-NEXT: movaps %xmm15, %xmm1
9213 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9214 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9215 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
9216 ; SSE-NEXT: movaps 464(%rdx), %xmm14
9217 ; SSE-NEXT: movaps 464(%rcx), %xmm0
9218 ; SSE-NEXT: movaps %xmm14, %xmm1
9219 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9220 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9221 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
9222 ; SSE-NEXT: movaps 464(%r8), %xmm11
9223 ; SSE-NEXT: movaps 464(%r9), %xmm0
9224 ; SSE-NEXT: movaps %xmm11, %xmm1
9225 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
9226 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9227 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
9228 ; SSE-NEXT: movaps 480(%rdi), %xmm12
9229 ; SSE-NEXT: movaps 480(%rsi), %xmm0
9230 ; SSE-NEXT: movaps %xmm12, %xmm13
9231 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
9232 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
9233 ; SSE-NEXT: movaps 480(%rdx), %xmm8
9234 ; SSE-NEXT: movaps 480(%rcx), %xmm0
9235 ; SSE-NEXT: movaps %xmm8, %xmm10
9236 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
9237 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
9238 ; SSE-NEXT: movaps 480(%r8), %xmm5
9239 ; SSE-NEXT: movaps 480(%r9), %xmm0
9240 ; SSE-NEXT: movaps %xmm5, %xmm9
9241 ; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm0[0]
9242 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
9243 ; SSE-NEXT: movaps 496(%rdi), %xmm6
9244 ; SSE-NEXT: movaps 496(%rsi), %xmm1
9245 ; SSE-NEXT: movaps %xmm6, %xmm7
9246 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
9247 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
9248 ; SSE-NEXT: movaps 496(%rdx), %xmm1
9249 ; SSE-NEXT: movaps 496(%rcx), %xmm0
9250 ; SSE-NEXT: movaps %xmm1, %xmm4
9251 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
9252 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
9253 ; SSE-NEXT: movaps 496(%r8), %xmm0
9254 ; SSE-NEXT: movaps 496(%r9), %xmm3
9255 ; SSE-NEXT: movaps %xmm0, %xmm2
9256 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
9257 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
9258 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
9259 ; SSE-NEXT: movaps %xmm0, 3056(%rax)
9260 ; SSE-NEXT: movaps %xmm1, 3040(%rax)
9261 ; SSE-NEXT: movaps %xmm6, 3024(%rax)
9262 ; SSE-NEXT: movaps %xmm2, 3008(%rax)
9263 ; SSE-NEXT: movaps %xmm4, 2992(%rax)
9264 ; SSE-NEXT: movaps %xmm7, 2976(%rax)
9265 ; SSE-NEXT: movaps %xmm5, 2960(%rax)
9266 ; SSE-NEXT: movaps %xmm8, 2944(%rax)
9267 ; SSE-NEXT: movaps %xmm12, 2928(%rax)
9268 ; SSE-NEXT: movaps %xmm9, 2912(%rax)
9269 ; SSE-NEXT: movaps %xmm10, 2896(%rax)
9270 ; SSE-NEXT: movaps %xmm13, 2880(%rax)
9271 ; SSE-NEXT: movaps %xmm11, 2864(%rax)
9272 ; SSE-NEXT: movaps %xmm14, 2848(%rax)
9273 ; SSE-NEXT: movaps %xmm15, 2832(%rax)
9274 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9275 ; SSE-NEXT: movaps %xmm0, 2816(%rax)
9276 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9277 ; SSE-NEXT: movaps %xmm0, 2800(%rax)
9278 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9279 ; SSE-NEXT: movaps %xmm0, 2784(%rax)
9280 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9281 ; SSE-NEXT: movaps %xmm0, 2768(%rax)
9282 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9283 ; SSE-NEXT: movaps %xmm0, 2752(%rax)
9284 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9285 ; SSE-NEXT: movaps %xmm0, 2736(%rax)
9286 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9287 ; SSE-NEXT: movaps %xmm0, 2720(%rax)
9288 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9289 ; SSE-NEXT: movaps %xmm0, 2704(%rax)
9290 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
9291 ; SSE-NEXT: movaps %xmm0, 2688(%rax)
9292 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9293 ; SSE-NEXT: movaps %xmm0, 2672(%rax)
9294 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9295 ; SSE-NEXT: movaps %xmm0, 2656(%rax)
9296 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9297 ; SSE-NEXT: movaps %xmm0, 2640(%rax)
9298 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9299 ; SSE-NEXT: movaps %xmm0, 2624(%rax)
9300 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9301 ; SSE-NEXT: movaps %xmm0, 2608(%rax)
9302 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9303 ; SSE-NEXT: movaps %xmm0, 2592(%rax)
9304 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9305 ; SSE-NEXT: movaps %xmm0, 2576(%rax)
9306 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9307 ; SSE-NEXT: movaps %xmm0, 2560(%rax)
9308 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9309 ; SSE-NEXT: movaps %xmm0, 2544(%rax)
9310 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9311 ; SSE-NEXT: movaps %xmm0, 2528(%rax)
9312 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9313 ; SSE-NEXT: movaps %xmm0, 2512(%rax)
9314 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9315 ; SSE-NEXT: movaps %xmm0, 2496(%rax)
9316 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9317 ; SSE-NEXT: movaps %xmm0, 2480(%rax)
9318 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9319 ; SSE-NEXT: movaps %xmm0, 2464(%rax)
9320 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9321 ; SSE-NEXT: movaps %xmm0, 2448(%rax)
9322 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9323 ; SSE-NEXT: movaps %xmm0, 2432(%rax)
9324 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9325 ; SSE-NEXT: movaps %xmm0, 2416(%rax)
9326 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9327 ; SSE-NEXT: movaps %xmm0, 2400(%rax)
9328 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9329 ; SSE-NEXT: movaps %xmm0, 2384(%rax)
9330 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9331 ; SSE-NEXT: movaps %xmm0, 2368(%rax)
9332 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9333 ; SSE-NEXT: movaps %xmm0, 2352(%rax)
9334 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9335 ; SSE-NEXT: movaps %xmm0, 2336(%rax)
9336 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9337 ; SSE-NEXT: movaps %xmm0, 2320(%rax)
9338 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9339 ; SSE-NEXT: movaps %xmm0, 2304(%rax)
9340 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9341 ; SSE-NEXT: movaps %xmm0, 2288(%rax)
9342 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9343 ; SSE-NEXT: movaps %xmm0, 2272(%rax)
9344 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9345 ; SSE-NEXT: movaps %xmm0, 2256(%rax)
9346 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9347 ; SSE-NEXT: movaps %xmm0, 2240(%rax)
9348 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9349 ; SSE-NEXT: movaps %xmm0, 2224(%rax)
9350 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9351 ; SSE-NEXT: movaps %xmm0, 2208(%rax)
9352 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9353 ; SSE-NEXT: movaps %xmm0, 2192(%rax)
9354 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9355 ; SSE-NEXT: movaps %xmm0, 2176(%rax)
9356 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9357 ; SSE-NEXT: movaps %xmm0, 2160(%rax)
9358 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9359 ; SSE-NEXT: movaps %xmm0, 2144(%rax)
9360 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9361 ; SSE-NEXT: movaps %xmm0, 2128(%rax)
9362 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9363 ; SSE-NEXT: movaps %xmm0, 2112(%rax)
9364 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9365 ; SSE-NEXT: movaps %xmm0, 2096(%rax)
9366 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9367 ; SSE-NEXT: movaps %xmm0, 2080(%rax)
9368 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9369 ; SSE-NEXT: movaps %xmm0, 2064(%rax)
9370 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9371 ; SSE-NEXT: movaps %xmm0, 2048(%rax)
9372 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9373 ; SSE-NEXT: movaps %xmm0, 2032(%rax)
9374 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9375 ; SSE-NEXT: movaps %xmm0, 2016(%rax)
9376 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9377 ; SSE-NEXT: movaps %xmm0, 2000(%rax)
9378 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9379 ; SSE-NEXT: movaps %xmm0, 1984(%rax)
9380 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9381 ; SSE-NEXT: movaps %xmm0, 1968(%rax)
9382 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9383 ; SSE-NEXT: movaps %xmm0, 1952(%rax)
9384 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9385 ; SSE-NEXT: movaps %xmm0, 1936(%rax)
9386 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9387 ; SSE-NEXT: movaps %xmm0, 1920(%rax)
9388 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9389 ; SSE-NEXT: movaps %xmm0, 1904(%rax)
9390 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9391 ; SSE-NEXT: movaps %xmm0, 1888(%rax)
9392 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9393 ; SSE-NEXT: movaps %xmm0, 1872(%rax)
9394 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9395 ; SSE-NEXT: movaps %xmm0, 1856(%rax)
9396 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9397 ; SSE-NEXT: movaps %xmm0, 1840(%rax)
9398 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9399 ; SSE-NEXT: movaps %xmm0, 1824(%rax)
9400 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9401 ; SSE-NEXT: movaps %xmm0, 1808(%rax)
9402 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9403 ; SSE-NEXT: movaps %xmm0, 1792(%rax)
9404 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9405 ; SSE-NEXT: movaps %xmm0, 1776(%rax)
9406 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9407 ; SSE-NEXT: movaps %xmm0, 1760(%rax)
9408 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9409 ; SSE-NEXT: movaps %xmm0, 1744(%rax)
9410 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9411 ; SSE-NEXT: movaps %xmm0, 1728(%rax)
9412 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9413 ; SSE-NEXT: movaps %xmm0, 1712(%rax)
9414 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9415 ; SSE-NEXT: movaps %xmm0, 1696(%rax)
9416 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9417 ; SSE-NEXT: movaps %xmm0, 1680(%rax)
9418 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9419 ; SSE-NEXT: movaps %xmm0, 1664(%rax)
9420 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9421 ; SSE-NEXT: movaps %xmm0, 1648(%rax)
9422 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9423 ; SSE-NEXT: movaps %xmm0, 1632(%rax)
9424 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9425 ; SSE-NEXT: movaps %xmm0, 1616(%rax)
9426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9427 ; SSE-NEXT: movaps %xmm0, 1600(%rax)
9428 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9429 ; SSE-NEXT: movaps %xmm0, 1584(%rax)
9430 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9431 ; SSE-NEXT: movaps %xmm0, 1568(%rax)
9432 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9433 ; SSE-NEXT: movaps %xmm0, 1552(%rax)
9434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9435 ; SSE-NEXT: movaps %xmm0, 1536(%rax)
9436 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9437 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
9438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9439 ; SSE-NEXT: movaps %xmm0, 1504(%rax)
9440 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9441 ; SSE-NEXT: movaps %xmm0, 1488(%rax)
9442 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9443 ; SSE-NEXT: movaps %xmm0, 1472(%rax)
9444 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9445 ; SSE-NEXT: movaps %xmm0, 1456(%rax)
9446 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9447 ; SSE-NEXT: movaps %xmm0, 1440(%rax)
9448 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9449 ; SSE-NEXT: movaps %xmm0, 1424(%rax)
9450 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9451 ; SSE-NEXT: movaps %xmm0, 1408(%rax)
9452 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9453 ; SSE-NEXT: movaps %xmm0, 1392(%rax)
9454 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9455 ; SSE-NEXT: movaps %xmm0, 1376(%rax)
9456 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9457 ; SSE-NEXT: movaps %xmm0, 1360(%rax)
9458 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9459 ; SSE-NEXT: movaps %xmm0, 1344(%rax)
9460 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9461 ; SSE-NEXT: movaps %xmm0, 1328(%rax)
9462 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9463 ; SSE-NEXT: movaps %xmm0, 1312(%rax)
9464 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9465 ; SSE-NEXT: movaps %xmm0, 1296(%rax)
9466 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9467 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
9468 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9469 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
9470 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9471 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
9472 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9473 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
9474 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9475 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
9476 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9477 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
9478 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9479 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
9480 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9481 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
9482 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9483 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
9484 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9485 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
9486 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9487 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
9488 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9489 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
9490 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9491 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
9492 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9493 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
9494 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9495 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
9496 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9497 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
9498 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9499 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
9500 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9501 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
9502 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9503 ; SSE-NEXT: movaps %xmm0, 992(%rax)
9504 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9505 ; SSE-NEXT: movaps %xmm0, 976(%rax)
9506 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9507 ; SSE-NEXT: movaps %xmm0, 960(%rax)
9508 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9509 ; SSE-NEXT: movaps %xmm0, 944(%rax)
9510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9511 ; SSE-NEXT: movaps %xmm0, 928(%rax)
9512 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9513 ; SSE-NEXT: movaps %xmm0, 912(%rax)
9514 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9515 ; SSE-NEXT: movaps %xmm0, 896(%rax)
9516 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9517 ; SSE-NEXT: movaps %xmm0, 880(%rax)
9518 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9519 ; SSE-NEXT: movaps %xmm0, 864(%rax)
9520 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9521 ; SSE-NEXT: movaps %xmm0, 848(%rax)
9522 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9523 ; SSE-NEXT: movaps %xmm0, 832(%rax)
9524 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9525 ; SSE-NEXT: movaps %xmm0, 816(%rax)
9526 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9527 ; SSE-NEXT: movaps %xmm0, 800(%rax)
9528 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9529 ; SSE-NEXT: movaps %xmm0, 784(%rax)
9530 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9531 ; SSE-NEXT: movaps %xmm0, 768(%rax)
9532 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9533 ; SSE-NEXT: movaps %xmm0, 752(%rax)
9534 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9535 ; SSE-NEXT: movaps %xmm0, 736(%rax)
9536 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9537 ; SSE-NEXT: movaps %xmm0, 720(%rax)
9538 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9539 ; SSE-NEXT: movaps %xmm0, 704(%rax)
9540 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9541 ; SSE-NEXT: movaps %xmm0, 688(%rax)
9542 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9543 ; SSE-NEXT: movaps %xmm0, 672(%rax)
9544 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9545 ; SSE-NEXT: movaps %xmm0, 656(%rax)
9546 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9547 ; SSE-NEXT: movaps %xmm0, 640(%rax)
9548 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9549 ; SSE-NEXT: movaps %xmm0, 624(%rax)
9550 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9551 ; SSE-NEXT: movaps %xmm0, 608(%rax)
9552 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9553 ; SSE-NEXT: movaps %xmm0, 592(%rax)
9554 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9555 ; SSE-NEXT: movaps %xmm0, 576(%rax)
9556 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9557 ; SSE-NEXT: movaps %xmm0, 560(%rax)
9558 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9559 ; SSE-NEXT: movaps %xmm0, 544(%rax)
9560 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9561 ; SSE-NEXT: movaps %xmm0, 528(%rax)
9562 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9563 ; SSE-NEXT: movaps %xmm0, 512(%rax)
9564 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9565 ; SSE-NEXT: movaps %xmm0, 496(%rax)
9566 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9567 ; SSE-NEXT: movaps %xmm0, 480(%rax)
9568 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9569 ; SSE-NEXT: movaps %xmm0, 464(%rax)
9570 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9571 ; SSE-NEXT: movaps %xmm0, 448(%rax)
9572 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9573 ; SSE-NEXT: movaps %xmm0, 432(%rax)
9574 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9575 ; SSE-NEXT: movaps %xmm0, 416(%rax)
9576 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9577 ; SSE-NEXT: movaps %xmm0, 400(%rax)
9578 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9579 ; SSE-NEXT: movaps %xmm0, 384(%rax)
9580 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9581 ; SSE-NEXT: movaps %xmm0, 368(%rax)
9582 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9583 ; SSE-NEXT: movaps %xmm0, 352(%rax)
9584 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9585 ; SSE-NEXT: movaps %xmm0, 336(%rax)
9586 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9587 ; SSE-NEXT: movaps %xmm0, 320(%rax)
9588 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9589 ; SSE-NEXT: movaps %xmm0, 304(%rax)
9590 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9591 ; SSE-NEXT: movaps %xmm0, 288(%rax)
9592 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9593 ; SSE-NEXT: movaps %xmm0, 272(%rax)
9594 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9595 ; SSE-NEXT: movaps %xmm0, 256(%rax)
9596 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9597 ; SSE-NEXT: movaps %xmm0, 240(%rax)
9598 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9599 ; SSE-NEXT: movaps %xmm0, 224(%rax)
9600 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9601 ; SSE-NEXT: movaps %xmm0, 208(%rax)
9602 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9603 ; SSE-NEXT: movaps %xmm0, 192(%rax)
9604 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9605 ; SSE-NEXT: movaps %xmm0, 176(%rax)
9606 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9607 ; SSE-NEXT: movaps %xmm0, 160(%rax)
9608 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9609 ; SSE-NEXT: movaps %xmm0, 144(%rax)
9610 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9611 ; SSE-NEXT: movaps %xmm0, 128(%rax)
9612 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9613 ; SSE-NEXT: movaps %xmm0, 112(%rax)
9614 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9615 ; SSE-NEXT: movaps %xmm0, 96(%rax)
9616 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9617 ; SSE-NEXT: movaps %xmm0, 80(%rax)
9618 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9619 ; SSE-NEXT: movaps %xmm0, 64(%rax)
9620 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9621 ; SSE-NEXT: movaps %xmm0, 48(%rax)
9622 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9623 ; SSE-NEXT: movaps %xmm0, 32(%rax)
9624 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9625 ; SSE-NEXT: movaps %xmm0, 16(%rax)
9626 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9627 ; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $2712, %rsp # imm = 0xA98
; SSE-NEXT: retq
; AVX-LABEL: store_i64_stride6_vf64:
; AVX: # %bb.0:
; AVX-NEXT: subq $3464, %rsp # imm = 0xD88
; AVX-NEXT: vmovaps 32(%r8), %ymm7
; AVX-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovapd (%r8), %ymm0
; AVX-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX-NEXT: vmovaps (%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rsi), %xmm5
; AVX-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 64(%rsi), %xmm6
; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps (%rdi), %xmm3
; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX-NEXT: vblendpd {{.*#+}} ymm3 = ymm0[0,1],ymm3[2,3]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps (%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps (%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, (%r9), %ymm1, %ymm3
; AVX-NEXT: vbroadcastsd 8(%r8), %ymm4
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5],ymm3[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX-NEXT: vmovaps 32(%rdi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm2[1],xmm5[1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5,6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 32(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 32(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 40(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 32(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 64(%r8), %ymm15
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm15[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 64(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
; AVX-NEXT: vmovaps 64(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 72(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 64(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 96(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 96(%r8), %ymm9
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm9[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 96(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 96(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 104(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 96(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 128(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 128(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovaps 128(%r8), %ymm2
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 128(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 128(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 136(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 128(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 160(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 160(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 160(%r8), %ymm8
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm8[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 160(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 160(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 168(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 160(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 192(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 192(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 192(%r8), %ymm7
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm7[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 192(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 192(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 200(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 192(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 224(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 224(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovapd 224(%r8), %ymm11
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm11[0,1],ymm1[2,3]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 224(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 224(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vbroadcastsd 232(%r8), %ymm3
; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX-NEXT: vinsertf128 $1, 224(%r9), %ymm1, %ymm1
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 256(%rsi), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 256(%rdi), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX-NEXT: vmovaps 256(%r8), %ymm2
; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX-NEXT: vmovaps 256(%rcx), %xmm2
; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovaps 256(%rdx), %xmm1
; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9820 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9821 ; AVX-NEXT: vbroadcastsd 264(%r8), %ymm3
9822 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9823 ; AVX-NEXT: vinsertf128 $1, 256(%r9), %ymm1, %ymm1
9824 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9825 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9826 ; AVX-NEXT: vmovaps 288(%rsi), %xmm2
9827 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9828 ; AVX-NEXT: vmovaps 288(%rdi), %xmm1
9829 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9830 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9831 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9832 ; AVX-NEXT: vmovaps 288(%r8), %ymm2
9833 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9834 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
9835 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9836 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
9837 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9838 ; AVX-NEXT: vmovaps 288(%rcx), %xmm2
9839 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9840 ; AVX-NEXT: vmovaps 288(%rdx), %xmm1
9841 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9842 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9843 ; AVX-NEXT: vbroadcastsd 296(%r8), %ymm3
9844 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9845 ; AVX-NEXT: vinsertf128 $1, 288(%r9), %ymm1, %ymm1
9846 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9847 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9848 ; AVX-NEXT: vmovaps 320(%rsi), %xmm2
9849 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9850 ; AVX-NEXT: vmovaps 320(%rdi), %xmm1
9851 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9852 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9853 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9854 ; AVX-NEXT: vmovaps 320(%r8), %ymm2
9855 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9856 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
9857 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9858 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
9859 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9860 ; AVX-NEXT: vmovaps 320(%rcx), %xmm2
9861 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9862 ; AVX-NEXT: vmovaps 320(%rdx), %xmm1
9863 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9864 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9865 ; AVX-NEXT: vbroadcastsd 328(%r8), %ymm3
9866 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9867 ; AVX-NEXT: vinsertf128 $1, 320(%r9), %ymm1, %ymm1
9868 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9869 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9870 ; AVX-NEXT: vmovaps 352(%rsi), %xmm2
9871 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9872 ; AVX-NEXT: vmovaps 352(%rdi), %xmm1
9873 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9874 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9875 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9876 ; AVX-NEXT: vmovaps 352(%r8), %ymm2
9877 ; AVX-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9878 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
9879 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9880 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
9881 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9882 ; AVX-NEXT: vmovaps 352(%rcx), %xmm2
9883 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9884 ; AVX-NEXT: vmovaps 352(%rdx), %xmm1
9885 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9886 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9887 ; AVX-NEXT: vbroadcastsd 360(%r8), %ymm3
9888 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9889 ; AVX-NEXT: vinsertf128 $1, 352(%r9), %ymm1, %ymm1
9890 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9891 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9892 ; AVX-NEXT: vmovaps 384(%rsi), %xmm2
9893 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9894 ; AVX-NEXT: vmovaps 384(%rdi), %xmm1
9895 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9896 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9897 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9898 ; AVX-NEXT: vmovapd 384(%r8), %ymm12
9899 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm12[0,1],ymm1[2,3]
9900 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9901 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
9902 ; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9903 ; AVX-NEXT: vmovaps 384(%rcx), %xmm2
9904 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9905 ; AVX-NEXT: vmovaps 384(%rdx), %xmm1
9906 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9907 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9908 ; AVX-NEXT: vbroadcastsd 392(%r8), %ymm3
9909 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9910 ; AVX-NEXT: vinsertf128 $1, 384(%r9), %ymm1, %ymm1
9911 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9912 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9913 ; AVX-NEXT: vmovaps 416(%rsi), %xmm2
9914 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9915 ; AVX-NEXT: vmovaps 416(%rdi), %xmm1
9916 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9917 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9918 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9919 ; AVX-NEXT: vmovapd 416(%r8), %ymm13
9920 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm13[0,1],ymm1[2,3]
9921 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9922 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
9923 ; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9924 ; AVX-NEXT: vmovaps 416(%rcx), %xmm2
9925 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9926 ; AVX-NEXT: vmovaps 416(%rdx), %xmm1
9927 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9928 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9929 ; AVX-NEXT: vbroadcastsd 424(%r8), %ymm3
9930 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9931 ; AVX-NEXT: vinsertf128 $1, 416(%r9), %ymm1, %ymm1
9932 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9933 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9934 ; AVX-NEXT: vmovaps 448(%rsi), %xmm2
9935 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9936 ; AVX-NEXT: vmovaps 448(%rdi), %xmm1
9937 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9938 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9939 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
9940 ; AVX-NEXT: vmovapd 448(%r8), %ymm14
9941 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm14[0,1],ymm1[2,3]
9942 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
9943 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
9944 ; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9945 ; AVX-NEXT: vmovaps 448(%rcx), %xmm2
9946 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9947 ; AVX-NEXT: vmovaps 448(%rdx), %xmm1
9948 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9949 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9950 ; AVX-NEXT: vbroadcastsd 456(%r8), %ymm3
9951 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
9952 ; AVX-NEXT: vinsertf128 $1, 448(%r9), %ymm1, %ymm1
9953 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
9954 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9955 ; AVX-NEXT: vmovaps 480(%rsi), %xmm2
9956 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9957 ; AVX-NEXT: vmovaps 480(%rdi), %xmm1
9958 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9959 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9960 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm3
9961 ; AVX-NEXT: vmovapd 480(%r8), %ymm5
9962 ; AVX-NEXT: vblendpd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3]
9963 ; AVX-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
9964 ; AVX-NEXT: vblendpd {{.*#+}} ymm1 = ymm3[0],ymm2[1],ymm3[2,3]
9965 ; AVX-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9966 ; AVX-NEXT: vmovaps 480(%rcx), %xmm1
9967 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9968 ; AVX-NEXT: vmovaps 480(%rdx), %xmm2
9969 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9970 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9971 ; AVX-NEXT: vbroadcastsd 488(%r8), %ymm3
9972 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
9973 ; AVX-NEXT: vinsertf128 $1, 480(%r9), %ymm2, %ymm2
9974 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
9975 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9976 ; AVX-NEXT: vmovapd (%rdi), %ymm2
9977 ; AVX-NEXT: vmovapd (%rsi), %ymm3
9978 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
9979 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm0[2,3],ymm2[2,3]
9980 ; AVX-NEXT: vmovapd (%r9), %ymm4
9981 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
9982 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm2[0],ymm3[0],ymm2[2],ymm3[3]
9983 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9984 ; AVX-NEXT: vmovapd 32(%rdi), %ymm2
9985 ; AVX-NEXT: vmovapd 32(%rsi), %ymm3
9986 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
9987 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm0 # 32-byte Folded Reload
9988 ; AVX-NEXT: # ymm0 = mem[2,3],ymm2[2,3]
9989 ; AVX-NEXT: vmovapd 32(%r9), %ymm6
9990 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm6[2,3],ymm3[2,3]
9991 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[3]
9992 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9993 ; AVX-NEXT: vmovapd 64(%rdi), %ymm0
9994 ; AVX-NEXT: vmovapd 64(%rsi), %ymm3
9995 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm3[1],ymm0[3],ymm3[3]
9996 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm15[2,3],ymm0[2,3]
9997 ; AVX-NEXT: vmovapd 64(%r9), %ymm15
9998 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm15[2,3],ymm3[2,3]
9999 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[3]
10000 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10001 ; AVX-NEXT: vmovapd 96(%rdi), %ymm3
10002 ; AVX-NEXT: vmovapd 96(%rsi), %ymm0
10003 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
10004 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm3[2,3]
10005 ; AVX-NEXT: vmovapd 96(%r9), %ymm3
10006 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm3[2,3],ymm0[2,3]
10007 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[3]
10008 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10009 ; AVX-NEXT: vmovapd 128(%rdi), %ymm9
10010 ; AVX-NEXT: vmovapd 128(%rsi), %ymm0
10011 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm0[1],ymm9[3],ymm0[3]
10012 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm1 # 32-byte Folded Reload
10013 ; AVX-NEXT: # ymm1 = mem[2,3],ymm9[2,3]
10014 ; AVX-NEXT: vmovapd 128(%r9), %ymm9
10015 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm9[2,3],ymm0[2,3]
10016 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
10017 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10018 ; AVX-NEXT: vmovapd 160(%rdi), %ymm0
10019 ; AVX-NEXT: vmovapd 160(%rsi), %ymm1
10020 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10021 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm8[2,3],ymm0[2,3]
10022 ; AVX-NEXT: vmovapd 160(%r9), %ymm8
10023 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm8[2,3],ymm1[2,3]
10024 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10025 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10026 ; AVX-NEXT: vmovapd 192(%rdi), %ymm0
10027 ; AVX-NEXT: vmovapd 192(%rsi), %ymm1
10028 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10029 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm7[2,3],ymm0[2,3]
10030 ; AVX-NEXT: vmovapd 192(%r9), %ymm7
10031 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm7[2,3],ymm1[2,3]
10032 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10033 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10034 ; AVX-NEXT: vmovapd 224(%rdi), %ymm0
10035 ; AVX-NEXT: vmovapd 224(%rsi), %ymm1
10036 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10037 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm11[2,3],ymm0[2,3]
10038 ; AVX-NEXT: vmovapd 224(%r9), %ymm10
10039 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm10[2,3],ymm1[2,3]
10040 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10041 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10042 ; AVX-NEXT: vmovapd 256(%rdi), %ymm1
10043 ; AVX-NEXT: vmovapd 256(%rsi), %ymm0
10044 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10045 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
10046 ; AVX-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
10047 ; AVX-NEXT: vmovapd 256(%r9), %ymm11
10048 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm11[2,3],ymm0[2,3]
10049 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
10050 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10051 ; AVX-NEXT: vmovapd 288(%rdi), %ymm0
10052 ; AVX-NEXT: vmovapd 288(%rsi), %ymm1
10053 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10054 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10055 ; AVX-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
10056 ; AVX-NEXT: vmovapd 288(%r9), %ymm2
10057 ; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10058 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm2[2,3],ymm1[2,3]
10059 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10060 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10061 ; AVX-NEXT: vmovapd 320(%rdi), %ymm1
10062 ; AVX-NEXT: vmovapd 320(%rsi), %ymm0
10063 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10064 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
10065 ; AVX-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
10066 ; AVX-NEXT: vmovapd 320(%r9), %ymm2
10067 ; AVX-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10068 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[2,3],ymm0[2,3]
10069 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
10070 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10071 ; AVX-NEXT: vmovapd 352(%rdi), %ymm0
10072 ; AVX-NEXT: vmovapd 352(%rsi), %ymm1
10073 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10074 ; AVX-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
10075 ; AVX-NEXT: # ymm2 = mem[2,3],ymm0[2,3]
10076 ; AVX-NEXT: vmovapd 352(%r9), %ymm0
10077 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10078 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm0[2,3],ymm1[2,3]
10079 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm2[0],ymm1[0],ymm2[2],ymm1[3]
10080 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10081 ; AVX-NEXT: vmovapd 384(%rdi), %ymm0
10082 ; AVX-NEXT: vmovapd 384(%rsi), %ymm1
10083 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10084 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm12[2,3],ymm0[2,3]
10085 ; AVX-NEXT: vmovapd 384(%r9), %ymm12
10086 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm12[2,3],ymm1[2,3]
10087 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10088 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10089 ; AVX-NEXT: vmovapd 416(%rdi), %ymm0
10090 ; AVX-NEXT: vmovapd 416(%rsi), %ymm1
10091 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10092 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm13[2,3],ymm0[2,3]
10093 ; AVX-NEXT: vmovapd 416(%r9), %ymm13
10094 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm13[2,3],ymm1[2,3]
10095 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10096 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10097 ; AVX-NEXT: vmovapd 448(%rdi), %ymm0
10098 ; AVX-NEXT: vmovapd 448(%rsi), %ymm1
10099 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10100 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm14[2,3],ymm0[2,3]
10101 ; AVX-NEXT: vmovapd 448(%r9), %ymm14
10102 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm14[2,3],ymm1[2,3]
10103 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10104 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10105 ; AVX-NEXT: vmovapd 480(%rdi), %ymm0
10106 ; AVX-NEXT: vmovapd 480(%rsi), %ymm1
10107 ; AVX-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
10108 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm5[2,3],ymm0[2,3]
10109 ; AVX-NEXT: vmovapd 480(%r9), %ymm5
10110 ; AVX-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm5[2,3],ymm1[2,3]
10111 ; AVX-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
10112 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10113 ; AVX-NEXT: vmovaps 16(%rdi), %xmm0
10114 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10115 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10116 ; AVX-NEXT: vbroadcastsd 16(%rcx), %ymm1
10117 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10118 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10119 ; AVX-NEXT: vmovapd 16(%rdx), %xmm0
10120 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10121 ; AVX-NEXT: vbroadcastsd 24(%r8), %ymm1
10122 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10123 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm4[3]
10124 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10125 ; AVX-NEXT: vmovaps 48(%rdi), %xmm0
10126 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10127 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10128 ; AVX-NEXT: vbroadcastsd 48(%rcx), %ymm1
10129 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10130 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10131 ; AVX-NEXT: vmovapd 48(%rdx), %xmm0
10132 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10133 ; AVX-NEXT: vbroadcastsd 56(%r8), %ymm1
10134 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10135 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm6[3]
10136 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10137 ; AVX-NEXT: vmovaps 80(%rdi), %xmm0
10138 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10139 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10140 ; AVX-NEXT: vbroadcastsd 80(%rcx), %ymm1
10141 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10142 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10143 ; AVX-NEXT: vmovapd 80(%rdx), %xmm0
10144 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10145 ; AVX-NEXT: vbroadcastsd 88(%r8), %ymm1
10146 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10147 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm15[3]
10148 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10149 ; AVX-NEXT: vmovaps 112(%rdi), %xmm0
10150 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10151 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10152 ; AVX-NEXT: vbroadcastsd 112(%rcx), %ymm1
10153 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10154 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10155 ; AVX-NEXT: vmovapd 112(%rdx), %xmm0
10156 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10157 ; AVX-NEXT: vbroadcastsd 120(%r8), %ymm1
10158 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10159 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3]
10160 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10161 ; AVX-NEXT: vmovaps 144(%rdi), %xmm0
10162 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10163 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10164 ; AVX-NEXT: vbroadcastsd 144(%rcx), %ymm1
10165 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10166 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10167 ; AVX-NEXT: vmovapd 144(%rdx), %xmm0
10168 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10169 ; AVX-NEXT: vbroadcastsd 152(%r8), %ymm1
10170 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10171 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm9[3]
10172 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10173 ; AVX-NEXT: vmovaps 176(%rdi), %xmm0
10174 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10175 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10176 ; AVX-NEXT: vbroadcastsd 176(%rcx), %ymm1
10177 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10178 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10179 ; AVX-NEXT: vmovapd 176(%rdx), %xmm0
10180 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10181 ; AVX-NEXT: vbroadcastsd 184(%r8), %ymm1
10182 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10183 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm8[3]
10184 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10185 ; AVX-NEXT: vmovaps 208(%rdi), %xmm0
10186 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10187 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10188 ; AVX-NEXT: vbroadcastsd 208(%rcx), %ymm1
10189 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10190 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10191 ; AVX-NEXT: vmovapd 208(%rdx), %xmm0
10192 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10193 ; AVX-NEXT: vbroadcastsd 216(%r8), %ymm1
10194 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10195 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm7[3]
10196 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10197 ; AVX-NEXT: vmovaps 240(%rdi), %xmm0
10198 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10199 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10200 ; AVX-NEXT: vbroadcastsd 240(%rcx), %ymm1
10201 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10202 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10203 ; AVX-NEXT: vmovapd 240(%rdx), %xmm0
10204 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10205 ; AVX-NEXT: vbroadcastsd 248(%r8), %ymm1
10206 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10207 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm10[3]
10208 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10209 ; AVX-NEXT: vmovaps 272(%rdi), %xmm0
10210 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10211 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10212 ; AVX-NEXT: vbroadcastsd 272(%rcx), %ymm1
10213 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10214 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10215 ; AVX-NEXT: vmovapd 272(%rdx), %xmm0
10216 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10217 ; AVX-NEXT: vbroadcastsd 280(%r8), %ymm1
10218 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10219 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm11[3]
10220 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10221 ; AVX-NEXT: vmovaps 304(%rdi), %xmm0
10222 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10223 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10224 ; AVX-NEXT: vbroadcastsd 304(%rcx), %ymm1
10225 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10226 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10227 ; AVX-NEXT: vmovaps 304(%rdx), %xmm0
10228 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10229 ; AVX-NEXT: vbroadcastsd 312(%r8), %ymm1
10230 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10231 ; AVX-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10232 ; AVX-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10233 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10234 ; AVX-NEXT: vmovaps 336(%rdi), %xmm0
10235 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10236 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10237 ; AVX-NEXT: vbroadcastsd 336(%rcx), %ymm1
10238 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10239 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10240 ; AVX-NEXT: vmovaps 336(%rdx), %xmm0
10241 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10242 ; AVX-NEXT: vbroadcastsd 344(%r8), %ymm1
10243 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10244 ; AVX-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10245 ; AVX-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10246 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10247 ; AVX-NEXT: vmovaps 368(%rdi), %xmm0
10248 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10249 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10250 ; AVX-NEXT: vbroadcastsd 368(%rcx), %ymm1
10251 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10252 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10253 ; AVX-NEXT: vmovaps 368(%rdx), %xmm0
10254 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10255 ; AVX-NEXT: vbroadcastsd 376(%r8), %ymm1
10256 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10257 ; AVX-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10258 ; AVX-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10259 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10260 ; AVX-NEXT: vmovaps 400(%rdi), %xmm0
10261 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10262 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10263 ; AVX-NEXT: vbroadcastsd 400(%rcx), %ymm1
10264 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10265 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10266 ; AVX-NEXT: vmovapd 400(%rdx), %xmm0
10267 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10268 ; AVX-NEXT: vbroadcastsd 408(%r8), %ymm1
10269 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10270 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm12[3]
10271 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10272 ; AVX-NEXT: vmovaps 432(%rdi), %xmm0
10273 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10274 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10275 ; AVX-NEXT: vbroadcastsd 432(%rcx), %ymm1
10276 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10277 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10278 ; AVX-NEXT: vmovapd 432(%rdx), %xmm0
10279 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10280 ; AVX-NEXT: vbroadcastsd 440(%r8), %ymm1
10281 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10282 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm13[3]
10283 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10284 ; AVX-NEXT: vmovaps 464(%rdi), %xmm0
10285 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10286 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10287 ; AVX-NEXT: vbroadcastsd 464(%rcx), %ymm1
10288 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10289 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10290 ; AVX-NEXT: vmovapd 464(%rdx), %xmm0
10291 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10292 ; AVX-NEXT: vbroadcastsd 472(%r8), %ymm1
10293 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10294 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm14[3]
10295 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10296 ; AVX-NEXT: vmovaps 496(%rdi), %xmm0
10297 ; AVX-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
10298 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
10299 ; AVX-NEXT: vbroadcastsd 496(%rcx), %ymm1
10300 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10301 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10302 ; AVX-NEXT: vmovapd 496(%rdx), %xmm0
10303 ; AVX-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
10304 ; AVX-NEXT: vbroadcastsd 504(%r8), %ymm1
10305 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
10306 ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm5[3]
10307 ; AVX-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10308 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10309 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10310 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10311 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10312 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10313 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10314 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10315 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10316 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10317 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10318 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10319 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10320 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10321 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10322 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10323 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10324 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10325 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10326 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10327 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10328 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10329 ; AVX-NEXT: vunpcklpd (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
10330 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10331 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10332 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10333 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10334 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10335 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10336 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10337 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10338 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10339 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10340 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10341 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10342 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10343 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10344 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10345 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10346 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10347 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10348 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10349 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10350 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10351 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10352 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10353 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10354 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10355 ; AVX-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
10356 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10357 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10358 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10359 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10360 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10361 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10362 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10363 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10364 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10365 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10366 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10367 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10368 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10369 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10370 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10371 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10372 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10373 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm15 # 16-byte Folded Reload
10374 ; AVX-NEXT: # xmm15 = xmm0[0],mem[0]
10375 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10376 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm14 # 16-byte Folded Reload
10377 ; AVX-NEXT: # xmm14 = xmm0[0],mem[0]
10378 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10379 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm13 # 16-byte Folded Reload
10380 ; AVX-NEXT: # xmm13 = xmm0[0],mem[0]
10381 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10382 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm12 # 16-byte Folded Reload
10383 ; AVX-NEXT: # xmm12 = xmm0[0],mem[0]
10384 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10385 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm11 # 16-byte Folded Reload
10386 ; AVX-NEXT: # xmm11 = xmm0[0],mem[0]
10387 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10388 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm10 # 16-byte Folded Reload
10389 ; AVX-NEXT: # xmm10 = xmm0[0],mem[0]
10390 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10391 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
10392 ; AVX-NEXT: # xmm9 = xmm0[0],mem[0]
10393 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10394 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm8 # 16-byte Folded Reload
10395 ; AVX-NEXT: # xmm8 = xmm0[0],mem[0]
10396 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10397 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
10398 ; AVX-NEXT: # xmm7 = xmm0[0],mem[0]
10399 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10400 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
10401 ; AVX-NEXT: # xmm6 = xmm0[0],mem[0]
10402 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10403 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
10404 ; AVX-NEXT: # xmm5 = xmm0[0],mem[0]
10405 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10406 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
10407 ; AVX-NEXT: # xmm4 = xmm0[0],mem[0]
10408 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10409 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
10410 ; AVX-NEXT: # xmm3 = xmm0[0],mem[0]
10411 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10412 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
10413 ; AVX-NEXT: # xmm2 = xmm0[0],mem[0]
10414 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10415 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
10416 ; AVX-NEXT: # xmm1 = xmm0[0],mem[0]
10417 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10418 ; AVX-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
10419 ; AVX-NEXT: # xmm0 = xmm0[0],mem[0]
10420 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
10421 ; AVX-NEXT: vmovaps %xmm0, 16(%rax)
10422 ; AVX-NEXT: vmovaps %xmm1, (%rax)
10423 ; AVX-NEXT: vmovaps %xmm2, 2320(%rax)
10424 ; AVX-NEXT: vmovaps %xmm3, 2304(%rax)
10425 ; AVX-NEXT: vmovaps %xmm4, 2704(%rax)
10426 ; AVX-NEXT: vmovaps %xmm5, 2688(%rax)
10427 ; AVX-NEXT: vmovaps %xmm6, 2896(%rax)
10428 ; AVX-NEXT: vmovaps %xmm7, 2880(%rax)
10429 ; AVX-NEXT: vmovaps %xmm8, 2512(%rax)
10430 ; AVX-NEXT: vmovaps %xmm9, 2496(%rax)
10431 ; AVX-NEXT: vmovaps %xmm10, 1936(%rax)
10432 ; AVX-NEXT: vmovaps %xmm11, 1920(%rax)
10433 ; AVX-NEXT: vmovaps %xmm12, 2128(%rax)
10434 ; AVX-NEXT: vmovaps %xmm13, 2112(%rax)
10435 ; AVX-NEXT: vmovaps %xmm14, 1744(%rax)
10436 ; AVX-NEXT: vmovaps %xmm15, 1728(%rax)
10437 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10438 ; AVX-NEXT: vmovaps %xmm0, 1168(%rax)
10439 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10440 ; AVX-NEXT: vmovaps %xmm0, 1152(%rax)
10441 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10442 ; AVX-NEXT: vmovaps %xmm0, 1360(%rax)
10443 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10444 ; AVX-NEXT: vmovaps %xmm0, 1344(%rax)
10445 ; AVX-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
10446 ; AVX-NEXT: vmovaps %xmm0, 976(%rax)
10447 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10448 ; AVX-NEXT: vmovaps %xmm0, 960(%rax)
10449 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10450 ; AVX-NEXT: vmovaps %xmm0, 592(%rax)
10451 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10452 ; AVX-NEXT: vmovaps %xmm0, 576(%rax)
10453 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10454 ; AVX-NEXT: vmovaps %xmm0, 208(%rax)
10455 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10456 ; AVX-NEXT: vmovaps %xmm0, 192(%rax)
10457 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10458 ; AVX-NEXT: vmovaps %xmm0, 400(%rax)
10459 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10460 ; AVX-NEXT: vmovaps %xmm0, 384(%rax)
10461 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10462 ; AVX-NEXT: vmovaps %xmm0, 784(%rax)
10463 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10464 ; AVX-NEXT: vmovaps %xmm0, 768(%rax)
10465 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10466 ; AVX-NEXT: vmovaps %xmm0, 1552(%rax)
10467 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10468 ; AVX-NEXT: vmovaps %xmm0, 1536(%rax)
10469 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10470 ; AVX-NEXT: vmovaps %ymm0, 3008(%rax)
10471 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10472 ; AVX-NEXT: vmovaps %ymm0, 2816(%rax)
10473 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10474 ; AVX-NEXT: vmovaps %ymm0, 2624(%rax)
10475 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10476 ; AVX-NEXT: vmovaps %ymm0, 2432(%rax)
10477 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10478 ; AVX-NEXT: vmovaps %ymm0, 2240(%rax)
10479 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10480 ; AVX-NEXT: vmovaps %ymm0, 2048(%rax)
10481 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10482 ; AVX-NEXT: vmovaps %ymm0, 1856(%rax)
10483 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10484 ; AVX-NEXT: vmovaps %ymm0, 1664(%rax)
10485 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10486 ; AVX-NEXT: vmovaps %ymm0, 1472(%rax)
10487 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10488 ; AVX-NEXT: vmovaps %ymm0, 1280(%rax)
10489 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10490 ; AVX-NEXT: vmovaps %ymm0, 1088(%rax)
10491 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10492 ; AVX-NEXT: vmovaps %ymm0, 896(%rax)
10493 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10494 ; AVX-NEXT: vmovaps %ymm0, 704(%rax)
10495 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10496 ; AVX-NEXT: vmovaps %ymm0, 512(%rax)
10497 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10498 ; AVX-NEXT: vmovaps %ymm0, 320(%rax)
10499 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10500 ; AVX-NEXT: vmovaps %ymm0, 128(%rax)
10501 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10502 ; AVX-NEXT: vmovaps %ymm0, 3040(%rax)
10503 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10504 ; AVX-NEXT: vmovaps %ymm0, 2976(%rax)
10505 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10506 ; AVX-NEXT: vmovaps %ymm0, 2944(%rax)
10507 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10508 ; AVX-NEXT: vmovaps %ymm0, 2912(%rax)
10509 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10510 ; AVX-NEXT: vmovaps %ymm0, 2848(%rax)
10511 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10512 ; AVX-NEXT: vmovaps %ymm0, 2784(%rax)
10513 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10514 ; AVX-NEXT: vmovaps %ymm0, 2752(%rax)
10515 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10516 ; AVX-NEXT: vmovaps %ymm0, 2720(%rax)
10517 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10518 ; AVX-NEXT: vmovaps %ymm0, 2656(%rax)
10519 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10520 ; AVX-NEXT: vmovaps %ymm0, 2592(%rax)
10521 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10522 ; AVX-NEXT: vmovaps %ymm0, 2560(%rax)
10523 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10524 ; AVX-NEXT: vmovaps %ymm0, 2528(%rax)
10525 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10526 ; AVX-NEXT: vmovaps %ymm0, 2464(%rax)
10527 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10528 ; AVX-NEXT: vmovaps %ymm0, 2400(%rax)
10529 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10530 ; AVX-NEXT: vmovaps %ymm0, 2368(%rax)
10531 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10532 ; AVX-NEXT: vmovaps %ymm0, 2336(%rax)
10533 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10534 ; AVX-NEXT: vmovaps %ymm0, 2272(%rax)
10535 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10536 ; AVX-NEXT: vmovaps %ymm0, 2208(%rax)
10537 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10538 ; AVX-NEXT: vmovaps %ymm0, 2176(%rax)
10539 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10540 ; AVX-NEXT: vmovaps %ymm0, 2144(%rax)
10541 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10542 ; AVX-NEXT: vmovaps %ymm0, 2080(%rax)
10543 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10544 ; AVX-NEXT: vmovaps %ymm0, 2016(%rax)
10545 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10546 ; AVX-NEXT: vmovaps %ymm0, 1984(%rax)
10547 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10548 ; AVX-NEXT: vmovaps %ymm0, 1952(%rax)
10549 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10550 ; AVX-NEXT: vmovaps %ymm0, 1888(%rax)
10551 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10552 ; AVX-NEXT: vmovaps %ymm0, 1824(%rax)
10553 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10554 ; AVX-NEXT: vmovaps %ymm0, 1792(%rax)
10555 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10556 ; AVX-NEXT: vmovaps %ymm0, 1760(%rax)
10557 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10558 ; AVX-NEXT: vmovaps %ymm0, 1696(%rax)
10559 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10560 ; AVX-NEXT: vmovaps %ymm0, 1632(%rax)
10561 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10562 ; AVX-NEXT: vmovaps %ymm0, 1600(%rax)
10563 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10564 ; AVX-NEXT: vmovaps %ymm0, 1568(%rax)
10565 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10566 ; AVX-NEXT: vmovaps %ymm0, 1504(%rax)
10567 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10568 ; AVX-NEXT: vmovaps %ymm0, 1440(%rax)
10569 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10570 ; AVX-NEXT: vmovaps %ymm0, 1408(%rax)
10571 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10572 ; AVX-NEXT: vmovaps %ymm0, 1376(%rax)
10573 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10574 ; AVX-NEXT: vmovaps %ymm0, 1312(%rax)
10575 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10576 ; AVX-NEXT: vmovaps %ymm0, 1248(%rax)
10577 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10578 ; AVX-NEXT: vmovaps %ymm0, 1216(%rax)
10579 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10580 ; AVX-NEXT: vmovaps %ymm0, 1184(%rax)
10581 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10582 ; AVX-NEXT: vmovaps %ymm0, 1120(%rax)
10583 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10584 ; AVX-NEXT: vmovaps %ymm0, 1056(%rax)
10585 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10586 ; AVX-NEXT: vmovaps %ymm0, 1024(%rax)
10587 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10588 ; AVX-NEXT: vmovaps %ymm0, 992(%rax)
10589 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10590 ; AVX-NEXT: vmovaps %ymm0, 928(%rax)
10591 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10592 ; AVX-NEXT: vmovaps %ymm0, 864(%rax)
10593 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10594 ; AVX-NEXT: vmovaps %ymm0, 832(%rax)
10595 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10596 ; AVX-NEXT: vmovaps %ymm0, 800(%rax)
10597 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10598 ; AVX-NEXT: vmovaps %ymm0, 736(%rax)
10599 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10600 ; AVX-NEXT: vmovaps %ymm0, 672(%rax)
10601 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10602 ; AVX-NEXT: vmovaps %ymm0, 640(%rax)
10603 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10604 ; AVX-NEXT: vmovaps %ymm0, 608(%rax)
10605 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10606 ; AVX-NEXT: vmovaps %ymm0, 544(%rax)
10607 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10608 ; AVX-NEXT: vmovaps %ymm0, 480(%rax)
10609 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10610 ; AVX-NEXT: vmovaps %ymm0, 448(%rax)
10611 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10612 ; AVX-NEXT: vmovaps %ymm0, 416(%rax)
10613 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10614 ; AVX-NEXT: vmovaps %ymm0, 352(%rax)
10615 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10616 ; AVX-NEXT: vmovaps %ymm0, 288(%rax)
10617 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10618 ; AVX-NEXT: vmovaps %ymm0, 256(%rax)
10619 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10620 ; AVX-NEXT: vmovaps %ymm0, 224(%rax)
10621 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10622 ; AVX-NEXT: vmovaps %ymm0, 160(%rax)
10623 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10624 ; AVX-NEXT: vmovaps %ymm0, 96(%rax)
10625 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10626 ; AVX-NEXT: vmovaps %ymm0, 64(%rax)
10627 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10628 ; AVX-NEXT: vmovaps %ymm0, 32(%rax)
10629 ; AVX-NEXT: addq $3464, %rsp # imm = 0xD88
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i64_stride6_vf64:
; AVX2: # %bb.0:
; AVX2-NEXT: subq $2968, %rsp # imm = 0xB98
; AVX2-NEXT: vmovaps 32(%r8), %ymm3
; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%r8), %ymm4
; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-NEXT: vmovaps (%rsi), %xmm7
; AVX2-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rsi), %xmm5
; AVX2-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps (%rdi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rdi), %xmm6
; AVX2-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vmovaps (%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps (%rdx), %xmm1
; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 32(%rdx), %xmm4
; AVX2-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 8(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm5[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm3[0,1],ymm2[0,1]
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 32(%rcx), %xmm1
; AVX2-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
; AVX2-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 128(%rcx), %xmm2
; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%rsi), %xmm2
; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovaps 160(%rdi), %xmm0
; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10756 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10757 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10758 ; AVX2-NEXT: vmovaps 160(%rcx), %xmm2
10759 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10760 ; AVX2-NEXT: vmovaps 160(%rdx), %xmm0
10761 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10762 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10763 ; AVX2-NEXT: vbroadcastsd 168(%r8), %ymm2
10764 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10765 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10766 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10767 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10768 ; AVX2-NEXT: vmovaps 192(%r8), %ymm1
10769 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10770 ; AVX2-NEXT: vmovaps 192(%rsi), %xmm2
10771 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10772 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm0
10773 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10774 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10775 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10776 ; AVX2-NEXT: vmovaps 192(%r9), %xmm1
10777 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10778 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10779 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10780 ; AVX2-NEXT: vmovaps 192(%rcx), %xmm2
10781 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10782 ; AVX2-NEXT: vmovaps 192(%rdx), %xmm0
10783 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10784 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10785 ; AVX2-NEXT: vbroadcastsd 200(%r8), %ymm2
10786 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10787 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10788 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10789 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10790 ; AVX2-NEXT: vmovaps 224(%r8), %ymm1
10791 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10792 ; AVX2-NEXT: vmovaps 224(%rsi), %xmm2
10793 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10794 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm0
10795 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10796 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10797 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10798 ; AVX2-NEXT: vmovaps 224(%r9), %xmm1
10799 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10800 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10801 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10802 ; AVX2-NEXT: vmovaps 224(%rcx), %xmm2
10803 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10804 ; AVX2-NEXT: vmovaps 224(%rdx), %xmm0
10805 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10806 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10807 ; AVX2-NEXT: vbroadcastsd 232(%r8), %ymm2
10808 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10809 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10810 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10811 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10812 ; AVX2-NEXT: vmovaps 256(%r8), %ymm1
10813 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10814 ; AVX2-NEXT: vmovaps 256(%rsi), %xmm2
10815 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10816 ; AVX2-NEXT: vmovaps 256(%rdi), %xmm0
10817 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10818 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10819 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10820 ; AVX2-NEXT: vmovaps 256(%r9), %xmm1
10821 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10822 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10823 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10824 ; AVX2-NEXT: vmovaps 256(%rcx), %xmm2
10825 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10826 ; AVX2-NEXT: vmovaps 256(%rdx), %xmm0
10827 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10828 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10829 ; AVX2-NEXT: vbroadcastsd 264(%r8), %ymm2
10830 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10831 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10832 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10833 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10834 ; AVX2-NEXT: vmovaps 288(%r8), %ymm1
10835 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10836 ; AVX2-NEXT: vmovaps 288(%rsi), %xmm2
10837 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10838 ; AVX2-NEXT: vmovaps 288(%rdi), %xmm0
10839 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10840 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10841 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10842 ; AVX2-NEXT: vmovaps 288(%r9), %xmm1
10843 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10844 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10845 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10846 ; AVX2-NEXT: vmovaps 288(%rcx), %xmm2
10847 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10848 ; AVX2-NEXT: vmovaps 288(%rdx), %xmm0
10849 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10850 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10851 ; AVX2-NEXT: vbroadcastsd 296(%r8), %ymm2
10852 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10853 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10854 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10855 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10856 ; AVX2-NEXT: vmovaps 320(%r8), %ymm1
10857 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10858 ; AVX2-NEXT: vmovaps 320(%rsi), %xmm2
10859 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10860 ; AVX2-NEXT: vmovaps 320(%rdi), %xmm0
10861 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10862 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10863 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10864 ; AVX2-NEXT: vmovaps 320(%r9), %xmm1
10865 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10866 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10867 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10868 ; AVX2-NEXT: vmovaps 320(%rcx), %xmm2
10869 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10870 ; AVX2-NEXT: vmovaps 320(%rdx), %xmm0
10871 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10872 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10873 ; AVX2-NEXT: vbroadcastsd 328(%r8), %ymm2
10874 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10875 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10876 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10877 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10878 ; AVX2-NEXT: vmovaps 352(%r8), %ymm1
10879 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10880 ; AVX2-NEXT: vmovaps 352(%rsi), %xmm2
10881 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10882 ; AVX2-NEXT: vmovaps 352(%rdi), %xmm0
10883 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10884 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10885 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10886 ; AVX2-NEXT: vmovaps 352(%r9), %xmm1
10887 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10888 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10889 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10890 ; AVX2-NEXT: vmovaps 352(%rcx), %xmm2
10891 ; AVX2-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10892 ; AVX2-NEXT: vmovaps 352(%rdx), %xmm0
10893 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10894 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10895 ; AVX2-NEXT: vbroadcastsd 360(%r8), %ymm2
10896 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10897 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10898 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10899 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10900 ; AVX2-NEXT: vmovaps 384(%r8), %ymm1
10901 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10902 ; AVX2-NEXT: vmovaps 384(%rsi), %xmm2
10903 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10904 ; AVX2-NEXT: vmovaps 384(%rdi), %xmm0
10905 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10906 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
10907 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10908 ; AVX2-NEXT: vmovaps 384(%r9), %xmm1
10909 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10910 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10911 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10912 ; AVX2-NEXT: vmovaps 384(%rcx), %xmm0
10913 ; AVX2-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10914 ; AVX2-NEXT: vmovaps 384(%rdx), %xmm15
10915 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
10916 ; AVX2-NEXT: vbroadcastsd 392(%r8), %ymm2
10917 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10918 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10919 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10920 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10921 ; AVX2-NEXT: vmovaps 416(%r8), %ymm1
10922 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10923 ; AVX2-NEXT: vmovaps 416(%rsi), %xmm12
10924 ; AVX2-NEXT: vmovaps 416(%rdi), %xmm13
10925 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
10926 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10927 ; AVX2-NEXT: vmovaps 416(%r9), %xmm1
10928 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10929 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10930 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10931 ; AVX2-NEXT: vmovaps 416(%rcx), %xmm10
10932 ; AVX2-NEXT: vmovaps 416(%rdx), %xmm11
10933 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
10934 ; AVX2-NEXT: vbroadcastsd 424(%r8), %ymm2
10935 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10936 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10937 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10938 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10939 ; AVX2-NEXT: vmovaps 448(%r8), %ymm1
10940 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10941 ; AVX2-NEXT: vmovaps 448(%rsi), %xmm8
10942 ; AVX2-NEXT: vmovaps 448(%rdi), %xmm9
10943 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
10944 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
10945 ; AVX2-NEXT: vmovaps 448(%r9), %xmm1
10946 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
10947 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
10948 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10949 ; AVX2-NEXT: vmovaps 448(%rcx), %xmm6
10950 ; AVX2-NEXT: vmovaps 448(%rdx), %xmm7
10951 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
10952 ; AVX2-NEXT: vbroadcastsd 456(%r8), %ymm2
10953 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10954 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10955 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10956 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10957 ; AVX2-NEXT: vmovaps 480(%r8), %ymm1
10958 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10959 ; AVX2-NEXT: vmovaps 480(%rsi), %xmm4
10960 ; AVX2-NEXT: vmovaps 480(%rdi), %xmm5
10961 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
10962 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
10963 ; AVX2-NEXT: vmovaps 480(%r9), %xmm0
10964 ; AVX2-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
10965 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
10966 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10967 ; AVX2-NEXT: vmovaps 480(%rcx), %xmm2
10968 ; AVX2-NEXT: vmovaps 480(%rdx), %xmm3
10969 ; AVX2-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
10970 ; AVX2-NEXT: vbroadcastsd 488(%r8), %ymm14
10971 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
10972 ; AVX2-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10973 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
10974 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10975 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10976 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
10977 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10978 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
10979 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10980 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10981 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10982 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
10983 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10984 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
10985 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10986 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10987 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10988 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
10989 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10990 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
10991 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10992 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10993 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10994 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
10995 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10996 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
10997 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
10998 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10999 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11000 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11001 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11002 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11003 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11004 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11005 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11006 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11007 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11008 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11009 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11010 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11011 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11012 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11013 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11014 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11015 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11016 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11017 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11018 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11019 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11020 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11021 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11022 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11023 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11024 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11025 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11026 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11027 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11028 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11029 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11030 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11031 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11032 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11033 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11034 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11035 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11036 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11037 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11038 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11039 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11040 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11041 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11042 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11043 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11044 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
11045 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11046 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11047 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11048 ; AVX2-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
11049 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11050 ; AVX2-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
11051 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11052 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11053 ; AVX2-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
11054 ; AVX2-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
11055 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11056 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11057 ; AVX2-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
11058 ; AVX2-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
11059 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11060 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11061 ; AVX2-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
11062 ; AVX2-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
11063 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11064 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11065 ; AVX2-NEXT: vmovaps (%rdi), %ymm0
11066 ; AVX2-NEXT: vmovaps (%rsi), %ymm1
11067 ; AVX2-NEXT: vmovaps (%rdx), %ymm2
11068 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11069 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11070 ; AVX2-NEXT: vbroadcastsd 16(%rcx), %ymm4
11071 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11072 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11073 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11074 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11075 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11076 ; AVX2-NEXT: vbroadcastsd 16(%r9), %ymm1
11077 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11078 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11079 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11080 ; AVX2-NEXT: vbroadcastsd 24(%r8), %ymm1
11081 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
11082 ; AVX2-NEXT: vmovaps 32(%rdi), %ymm0
11083 ; AVX2-NEXT: vmovaps 32(%rsi), %ymm1
11084 ; AVX2-NEXT: vmovaps 32(%rdx), %ymm2
11085 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11086 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11087 ; AVX2-NEXT: vbroadcastsd 48(%rcx), %ymm4
11088 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11089 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11090 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11091 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11092 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11093 ; AVX2-NEXT: vbroadcastsd 48(%r9), %ymm1
11094 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11095 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11096 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11097 ; AVX2-NEXT: vbroadcastsd 56(%r8), %ymm1
11098 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
11099 ; AVX2-NEXT: vmovaps 64(%rdi), %ymm0
11100 ; AVX2-NEXT: vmovaps 64(%rsi), %ymm1
11101 ; AVX2-NEXT: vmovaps 64(%rdx), %ymm2
11102 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11103 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11104 ; AVX2-NEXT: vbroadcastsd 80(%rcx), %ymm4
11105 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11106 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11107 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11108 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11109 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11110 ; AVX2-NEXT: vbroadcastsd 80(%r9), %ymm1
11111 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11112 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11113 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11114 ; AVX2-NEXT: vbroadcastsd 88(%r8), %ymm1
11115 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm1[2,3]
11116 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm0
11117 ; AVX2-NEXT: vmovaps 96(%rsi), %ymm1
11118 ; AVX2-NEXT: vmovaps 96(%rdx), %ymm2
11119 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11120 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11121 ; AVX2-NEXT: vbroadcastsd 112(%rcx), %ymm4
11122 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11123 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11124 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11125 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11126 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11127 ; AVX2-NEXT: vbroadcastsd 112(%r9), %ymm1
11128 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11129 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11130 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11131 ; AVX2-NEXT: vbroadcastsd 120(%r8), %ymm1
11132 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11133 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11134 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm0
11135 ; AVX2-NEXT: vmovaps 128(%rsi), %ymm1
11136 ; AVX2-NEXT: vmovaps 128(%rdx), %ymm2
11137 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11138 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11139 ; AVX2-NEXT: vbroadcastsd 144(%rcx), %ymm4
11140 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11141 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11142 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11143 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11144 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11145 ; AVX2-NEXT: vbroadcastsd 144(%r9), %ymm1
11146 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11147 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11148 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11149 ; AVX2-NEXT: vbroadcastsd 152(%r8), %ymm1
11150 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11151 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11152 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm0
11153 ; AVX2-NEXT: vmovaps 160(%rsi), %ymm1
11154 ; AVX2-NEXT: vmovaps 160(%rdx), %ymm2
11155 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11156 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11157 ; AVX2-NEXT: vbroadcastsd 176(%rcx), %ymm4
11158 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11159 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11160 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11161 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11162 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11163 ; AVX2-NEXT: vbroadcastsd 176(%r9), %ymm1
11164 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11165 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11166 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11167 ; AVX2-NEXT: vbroadcastsd 184(%r8), %ymm1
11168 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11169 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11170 ; AVX2-NEXT: vmovaps 192(%rdi), %ymm0
11171 ; AVX2-NEXT: vmovaps 192(%rsi), %ymm1
11172 ; AVX2-NEXT: vmovaps 192(%rdx), %ymm2
11173 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11174 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11175 ; AVX2-NEXT: vbroadcastsd 208(%rcx), %ymm4
11176 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11177 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11178 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11179 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11180 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11181 ; AVX2-NEXT: vbroadcastsd 208(%r9), %ymm1
11182 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11183 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11184 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11185 ; AVX2-NEXT: vbroadcastsd 216(%r8), %ymm1
11186 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11187 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11188 ; AVX2-NEXT: vmovaps 224(%rdi), %ymm0
11189 ; AVX2-NEXT: vmovaps 224(%rsi), %ymm1
11190 ; AVX2-NEXT: vmovaps 224(%rdx), %ymm2
11191 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11192 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11193 ; AVX2-NEXT: vbroadcastsd 240(%rcx), %ymm4
11194 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11195 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11196 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11197 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11198 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11199 ; AVX2-NEXT: vbroadcastsd 240(%r9), %ymm1
11200 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11201 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11202 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11203 ; AVX2-NEXT: vbroadcastsd 248(%r8), %ymm1
11204 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11205 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11206 ; AVX2-NEXT: vmovaps 256(%rdi), %ymm0
11207 ; AVX2-NEXT: vmovaps 256(%rsi), %ymm1
11208 ; AVX2-NEXT: vmovaps 256(%rdx), %ymm2
11209 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11210 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11211 ; AVX2-NEXT: vbroadcastsd 272(%rcx), %ymm4
11212 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11213 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11214 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11215 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11216 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11217 ; AVX2-NEXT: vbroadcastsd 272(%r9), %ymm1
11218 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11219 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11220 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11221 ; AVX2-NEXT: vbroadcastsd 280(%r8), %ymm1
11222 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11223 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11224 ; AVX2-NEXT: vmovaps 288(%rdi), %ymm0
11225 ; AVX2-NEXT: vmovaps 288(%rsi), %ymm1
11226 ; AVX2-NEXT: vmovaps 288(%rdx), %ymm2
11227 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11228 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11229 ; AVX2-NEXT: vbroadcastsd 304(%rcx), %ymm4
11230 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11231 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11232 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11233 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11234 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11235 ; AVX2-NEXT: vbroadcastsd 304(%r9), %ymm1
11236 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11237 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11238 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11239 ; AVX2-NEXT: vbroadcastsd 312(%r8), %ymm1
11240 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11241 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11242 ; AVX2-NEXT: vmovaps 320(%rdi), %ymm0
11243 ; AVX2-NEXT: vmovaps 320(%rsi), %ymm1
11244 ; AVX2-NEXT: vmovaps 320(%rdx), %ymm2
11245 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11246 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11247 ; AVX2-NEXT: vbroadcastsd 336(%rcx), %ymm4
11248 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11249 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11250 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11251 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11252 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11253 ; AVX2-NEXT: vbroadcastsd 336(%r9), %ymm1
11254 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11255 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11256 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11257 ; AVX2-NEXT: vbroadcastsd 344(%r8), %ymm1
11258 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11259 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11260 ; AVX2-NEXT: vmovaps 352(%rdi), %ymm0
11261 ; AVX2-NEXT: vmovaps 352(%rsi), %ymm1
11262 ; AVX2-NEXT: vmovaps 352(%rdx), %ymm2
11263 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11264 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11265 ; AVX2-NEXT: vbroadcastsd 368(%rcx), %ymm4
11266 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11267 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11268 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11269 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11270 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11271 ; AVX2-NEXT: vbroadcastsd 368(%r9), %ymm1
11272 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11273 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11274 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11275 ; AVX2-NEXT: vbroadcastsd 376(%r8), %ymm1
11276 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
11277 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11278 ; AVX2-NEXT: vmovaps 384(%rdi), %ymm0
11279 ; AVX2-NEXT: vmovaps 384(%rsi), %ymm1
11280 ; AVX2-NEXT: vmovaps 384(%rdx), %ymm2
11281 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11282 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11283 ; AVX2-NEXT: vbroadcastsd 400(%rcx), %ymm4
11284 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11285 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11286 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11287 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11288 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11289 ; AVX2-NEXT: vbroadcastsd 400(%r9), %ymm1
11290 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11291 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11292 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11293 ; AVX2-NEXT: vbroadcastsd 408(%r8), %ymm1
11294 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm15 = ymm0[2,3],ymm1[2,3]
11295 ; AVX2-NEXT: vmovaps 416(%rdi), %ymm0
11296 ; AVX2-NEXT: vmovaps 416(%rsi), %ymm1
11297 ; AVX2-NEXT: vmovaps 416(%rdx), %ymm2
11298 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11299 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
11300 ; AVX2-NEXT: vbroadcastsd 432(%rcx), %ymm4
11301 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11302 ; AVX2-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11303 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11304 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11305 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11306 ; AVX2-NEXT: vbroadcastsd 432(%r9), %ymm1
11307 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11308 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11309 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11310 ; AVX2-NEXT: vbroadcastsd 440(%r8), %ymm1
11311 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[2,3],ymm1[2,3]
11312 ; AVX2-NEXT: vmovaps 448(%rdi), %ymm0
11313 ; AVX2-NEXT: vmovaps 448(%rsi), %ymm1
11314 ; AVX2-NEXT: vmovaps 448(%rdx), %ymm2
11315 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11316 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
11317 ; AVX2-NEXT: vbroadcastsd 464(%rcx), %ymm5
11318 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5],ymm5[6,7]
11319 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11320 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11321 ; AVX2-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
11322 ; AVX2-NEXT: vbroadcastsd 464(%r9), %ymm1
11323 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
11324 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
11325 ; AVX2-NEXT: vbroadcastsd 472(%r8), %ymm2
11326 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm2[2,3]
11327 ; AVX2-NEXT: vmovaps 480(%rdi), %ymm1
11328 ; AVX2-NEXT: vmovaps 480(%rsi), %ymm2
11329 ; AVX2-NEXT: vmovaps 480(%rdx), %ymm4
11330 ; AVX2-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11331 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],ymm4[2,3]
11332 ; AVX2-NEXT: vbroadcastsd 496(%rcx), %ymm8
11333 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm8[6,7]
11334 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11335 ; AVX2-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
11336 ; AVX2-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
11337 ; AVX2-NEXT: vbroadcastsd 496(%r9), %ymm2
11338 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
11339 ; AVX2-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
11340 ; AVX2-NEXT: vbroadcastsd 504(%r8), %ymm2
11341 ; AVX2-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[2,3],ymm2[2,3]
11342 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
11343 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm12[0,1,2,3,4,5],mem[6,7]
11344 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11345 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5],mem[6,7]
11346 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11347 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],mem[6,7]
11348 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11349 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11350 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
11351 ; AVX2-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11352 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11353 ; AVX2-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3,4,5],mem[6,7]
11354 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11355 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm1[0,1,2,3,4,5],mem[6,7]
11356 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11357 ; AVX2-NEXT: vblendps {{.*#+}} ymm12 = ymm1[0,1,2,3,4,5],mem[6,7]
11358 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11359 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3,4,5],mem[6,7]
11360 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11361 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm1[0,1,2,3,4,5],mem[6,7]
11362 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11363 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3,4,5],mem[6,7]
11364 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11365 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],mem[6,7]
11366 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11367 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
11368 ; AVX2-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
11369 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
11370 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
11371 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
11372 ; AVX2-NEXT: vmovaps %ymm0, 3040(%rax)
11373 ; AVX2-NEXT: vmovaps %ymm3, 3008(%rax)
11374 ; AVX2-NEXT: vmovaps %ymm5, 2976(%rax)
11375 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11376 ; AVX2-NEXT: vmovaps %ymm0, 2880(%rax)
11377 ; AVX2-NEXT: vmovaps %ymm6, 2848(%rax)
11378 ; AVX2-NEXT: vmovaps %ymm9, 2816(%rax)
11379 ; AVX2-NEXT: vmovaps %ymm11, 2784(%rax)
11380 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11381 ; AVX2-NEXT: vmovaps %ymm0, 2688(%rax)
11382 ; AVX2-NEXT: vmovaps %ymm10, 2656(%rax)
11383 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11384 ; AVX2-NEXT: vmovaps %ymm0, 2624(%rax)
11385 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11386 ; AVX2-NEXT: vmovaps %ymm0, 2592(%rax)
11387 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11388 ; AVX2-NEXT: vmovaps %ymm0, 2496(%rax)
11389 ; AVX2-NEXT: vmovaps %ymm15, 2464(%rax)
11390 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11391 ; AVX2-NEXT: vmovaps %ymm0, 2432(%rax)
11392 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11393 ; AVX2-NEXT: vmovaps %ymm0, 2400(%rax)
11394 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11395 ; AVX2-NEXT: vmovaps %ymm0, 2304(%rax)
11396 ; AVX2-NEXT: vmovaps %ymm1, 2272(%rax)
11397 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11398 ; AVX2-NEXT: vmovaps %ymm0, 2240(%rax)
11399 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11400 ; AVX2-NEXT: vmovaps %ymm0, 2208(%rax)
11401 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11402 ; AVX2-NEXT: vmovaps %ymm0, 2112(%rax)
11403 ; AVX2-NEXT: vmovaps %ymm2, 2080(%rax)
11404 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11405 ; AVX2-NEXT: vmovaps %ymm0, 2048(%rax)
11406 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11407 ; AVX2-NEXT: vmovaps %ymm0, 2016(%rax)
11408 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11409 ; AVX2-NEXT: vmovaps %ymm0, 1920(%rax)
11410 ; AVX2-NEXT: vmovaps %ymm4, 1888(%rax)
11411 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11412 ; AVX2-NEXT: vmovaps %ymm0, 1856(%rax)
11413 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11414 ; AVX2-NEXT: vmovaps %ymm0, 1824(%rax)
11415 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11416 ; AVX2-NEXT: vmovaps %ymm0, 1728(%rax)
11417 ; AVX2-NEXT: vmovaps %ymm7, 1696(%rax)
11418 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11419 ; AVX2-NEXT: vmovaps %ymm0, 1664(%rax)
11420 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11421 ; AVX2-NEXT: vmovaps %ymm0, 1632(%rax)
11422 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11423 ; AVX2-NEXT: vmovaps %ymm0, 1536(%rax)
11424 ; AVX2-NEXT: vmovaps %ymm8, 1504(%rax)
11425 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11426 ; AVX2-NEXT: vmovaps %ymm0, 1472(%rax)
11427 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11428 ; AVX2-NEXT: vmovaps %ymm0, 1440(%rax)
11429 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11430 ; AVX2-NEXT: vmovaps %ymm0, 1344(%rax)
11431 ; AVX2-NEXT: vmovaps %ymm12, 1312(%rax)
11432 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11433 ; AVX2-NEXT: vmovaps %ymm0, 1280(%rax)
11434 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11435 ; AVX2-NEXT: vmovaps %ymm0, 1248(%rax)
11436 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11437 ; AVX2-NEXT: vmovaps %ymm0, 1152(%rax)
11438 ; AVX2-NEXT: vmovaps %ymm13, 1120(%rax)
11439 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11440 ; AVX2-NEXT: vmovaps %ymm0, 1088(%rax)
11441 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11442 ; AVX2-NEXT: vmovaps %ymm0, 1056(%rax)
11443 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11444 ; AVX2-NEXT: vmovaps %ymm0, 960(%rax)
11445 ; AVX2-NEXT: vmovaps %ymm14, 928(%rax)
11446 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11447 ; AVX2-NEXT: vmovaps %ymm0, 896(%rax)
11448 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11449 ; AVX2-NEXT: vmovaps %ymm0, 864(%rax)
11450 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11451 ; AVX2-NEXT: vmovaps %ymm0, 768(%rax)
11452 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11453 ; AVX2-NEXT: vmovaps %ymm0, 736(%rax)
11454 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11455 ; AVX2-NEXT: vmovaps %ymm0, 704(%rax)
11456 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11457 ; AVX2-NEXT: vmovaps %ymm0, 672(%rax)
11458 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11459 ; AVX2-NEXT: vmovaps %ymm0, 576(%rax)
11460 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11461 ; AVX2-NEXT: vmovaps %ymm0, 544(%rax)
11462 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11463 ; AVX2-NEXT: vmovaps %ymm0, 512(%rax)
11464 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11465 ; AVX2-NEXT: vmovaps %ymm0, 480(%rax)
11466 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11467 ; AVX2-NEXT: vmovaps %ymm0, 384(%rax)
11468 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11469 ; AVX2-NEXT: vmovaps %ymm0, 352(%rax)
11470 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11471 ; AVX2-NEXT: vmovaps %ymm0, 320(%rax)
11472 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11473 ; AVX2-NEXT: vmovaps %ymm0, 288(%rax)
11474 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11475 ; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
11476 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11477 ; AVX2-NEXT: vmovaps %ymm0, 160(%rax)
11478 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11479 ; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
11480 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11481 ; AVX2-NEXT: vmovaps %ymm0, 96(%rax)
11482 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11483 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
11484 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11485 ; AVX2-NEXT: vmovaps %ymm0, 2944(%rax)
11486 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11487 ; AVX2-NEXT: vmovaps %ymm0, 2912(%rax)
11488 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11489 ; AVX2-NEXT: vmovaps %ymm0, 2752(%rax)
11490 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11491 ; AVX2-NEXT: vmovaps %ymm0, 2720(%rax)
11492 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11493 ; AVX2-NEXT: vmovaps %ymm0, 2560(%rax)
11494 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11495 ; AVX2-NEXT: vmovaps %ymm0, 2528(%rax)
11496 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11497 ; AVX2-NEXT: vmovaps %ymm0, 2368(%rax)
11498 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11499 ; AVX2-NEXT: vmovaps %ymm0, 2336(%rax)
11500 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11501 ; AVX2-NEXT: vmovaps %ymm0, 2176(%rax)
11502 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11503 ; AVX2-NEXT: vmovaps %ymm0, 2144(%rax)
11504 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11505 ; AVX2-NEXT: vmovaps %ymm0, 1984(%rax)
11506 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11507 ; AVX2-NEXT: vmovaps %ymm0, 1952(%rax)
11508 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11509 ; AVX2-NEXT: vmovaps %ymm0, 1792(%rax)
11510 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11511 ; AVX2-NEXT: vmovaps %ymm0, 1760(%rax)
11512 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11513 ; AVX2-NEXT: vmovaps %ymm0, 1600(%rax)
11514 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11515 ; AVX2-NEXT: vmovaps %ymm0, 1568(%rax)
11516 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11517 ; AVX2-NEXT: vmovaps %ymm0, 1408(%rax)
11518 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11519 ; AVX2-NEXT: vmovaps %ymm0, 1376(%rax)
11520 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11521 ; AVX2-NEXT: vmovaps %ymm0, 1216(%rax)
11522 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11523 ; AVX2-NEXT: vmovaps %ymm0, 1184(%rax)
11524 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11525 ; AVX2-NEXT: vmovaps %ymm0, 1024(%rax)
11526 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11527 ; AVX2-NEXT: vmovaps %ymm0, 992(%rax)
11528 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11529 ; AVX2-NEXT: vmovaps %ymm0, 832(%rax)
11530 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11531 ; AVX2-NEXT: vmovaps %ymm0, 800(%rax)
11532 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11533 ; AVX2-NEXT: vmovaps %ymm0, 640(%rax)
11534 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11535 ; AVX2-NEXT: vmovaps %ymm0, 608(%rax)
11536 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11537 ; AVX2-NEXT: vmovaps %ymm0, 448(%rax)
11538 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11539 ; AVX2-NEXT: vmovaps %ymm0, 416(%rax)
11540 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11541 ; AVX2-NEXT: vmovaps %ymm0, 256(%rax)
11542 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11543 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
11544 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11545 ; AVX2-NEXT: vmovaps %ymm0, 64(%rax)
11546 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11547 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
11548 ; AVX2-NEXT: addq $2968, %rsp # imm = 0xB98
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i64_stride6_vf64:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: subq $2968, %rsp # imm = 0xB98
; AVX2-FP-NEXT: vmovaps 32(%r8), %ymm3
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%r8), %ymm4
; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FP-NEXT: vmovaps (%rsi), %xmm7
; AVX2-FP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm5
; AVX2-FP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm6
; AVX2-FP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vmovaps (%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdx), %xmm1
; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm4
; AVX2-FP-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 8(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm5[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm3[0,1],ymm2[0,1]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm1
; AVX2-FP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
; AVX2-FP-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 168(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 200(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 224(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 232(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 256(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 264(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 288(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 296(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 320(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 328(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 352(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%rdx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 360(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%rsi), %xmm2
; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%rdi), %xmm0
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 384(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%rcx), %xmm0
; AVX2-FP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%rdx), %xmm15
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
; AVX2-FP-NEXT: vbroadcastsd 392(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 416(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 416(%rsi), %xmm12
; AVX2-FP-NEXT: vmovaps 416(%rdi), %xmm13
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 416(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 416(%rcx), %xmm10
; AVX2-FP-NEXT: vmovaps 416(%rdx), %xmm11
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
; AVX2-FP-NEXT: vbroadcastsd 424(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 448(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 448(%rsi), %xmm8
; AVX2-FP-NEXT: vmovaps 448(%rdi), %xmm9
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 448(%r9), %xmm1
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 448(%rcx), %xmm6
; AVX2-FP-NEXT: vmovaps 448(%rdx), %xmm7
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-FP-NEXT: vbroadcastsd 456(%r8), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 480(%r8), %ymm1
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 480(%rsi), %xmm4
; AVX2-FP-NEXT: vmovaps 480(%rdi), %xmm5
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
; AVX2-FP-NEXT: vmovaps 480(%r9), %xmm0
; AVX2-FP-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 480(%rcx), %xmm2
; AVX2-FP-NEXT: vmovaps 480(%rdx), %xmm3
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
; AVX2-FP-NEXT: vbroadcastsd 488(%r8), %ymm14
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-FP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
; AVX2-FP-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps (%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps (%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps (%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 24(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 48(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 56(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 64(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 80(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 88(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 96(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 112(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 120(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 128(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 144(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 144(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 152(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 160(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 160(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 176(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 176(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 184(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 192(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 192(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 192(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 208(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 208(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 216(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 224(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 224(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 224(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 240(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 240(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 248(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 256(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 256(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 256(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 272(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 272(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 280(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 288(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 288(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 288(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 304(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 304(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 312(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 320(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 320(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 320(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 336(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 336(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 344(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 352(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 352(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 352(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 368(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 368(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 376(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovaps 384(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 384(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 384(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 400(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 400(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 408(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm15 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 416(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 416(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 416(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 432(%rcx), %ymm4
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 432(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 440(%r8), %ymm1
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vmovaps 448(%rdi), %ymm0
; AVX2-FP-NEXT: vmovaps 448(%rsi), %ymm1
; AVX2-FP-NEXT: vmovaps 448(%rdx), %ymm2
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vbroadcastsd 464(%rcx), %ymm5
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-FP-NEXT: vbroadcastsd 464(%r9), %ymm1
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 472(%r8), %ymm2
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm2[2,3]
; AVX2-FP-NEXT: vmovaps 480(%rdi), %ymm1
; AVX2-FP-NEXT: vmovaps 480(%rsi), %ymm2
; AVX2-FP-NEXT: vmovaps 480(%rdx), %ymm4
; AVX2-FP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],ymm4[2,3]
; AVX2-FP-NEXT: vbroadcastsd 496(%rcx), %ymm8
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm8[6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-FP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-FP-NEXT: vbroadcastsd 496(%r9), %ymm2
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-FP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
; AVX2-FP-NEXT: vbroadcastsd 504(%r8), %ymm2
; AVX2-FP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[2,3],ymm2[2,3]
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm12 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-FP-NEXT: vmovaps %ymm0, 3040(%rax)
; AVX2-FP-NEXT: vmovaps %ymm3, 3008(%rax)
; AVX2-FP-NEXT: vmovaps %ymm5, 2976(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2880(%rax)
; AVX2-FP-NEXT: vmovaps %ymm6, 2848(%rax)
; AVX2-FP-NEXT: vmovaps %ymm9, 2816(%rax)
; AVX2-FP-NEXT: vmovaps %ymm11, 2784(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2688(%rax)
; AVX2-FP-NEXT: vmovaps %ymm10, 2656(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2624(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2592(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2496(%rax)
; AVX2-FP-NEXT: vmovaps %ymm15, 2464(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2432(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2400(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2304(%rax)
; AVX2-FP-NEXT: vmovaps %ymm1, 2272(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2240(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2208(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2112(%rax)
; AVX2-FP-NEXT: vmovaps %ymm2, 2080(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2048(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2016(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1920(%rax)
; AVX2-FP-NEXT: vmovaps %ymm4, 1888(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1856(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1824(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1728(%rax)
; AVX2-FP-NEXT: vmovaps %ymm7, 1696(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1664(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1632(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1536(%rax)
; AVX2-FP-NEXT: vmovaps %ymm8, 1504(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1472(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1344(%rax)
; AVX2-FP-NEXT: vmovaps %ymm12, 1312(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1280(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1152(%rax)
; AVX2-FP-NEXT: vmovaps %ymm13, 1120(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1088(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 960(%rax)
; AVX2-FP-NEXT: vmovaps %ymm14, 928(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 896(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 864(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 768(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 736(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 704(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 672(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 544(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 512(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 480(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 352(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 288(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 160(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 96(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, (%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2944(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2912(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2752(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2720(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2560(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2528(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2368(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2336(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2176(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 2144(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1984(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1952(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1792(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1760(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1600(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1568(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 992(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 832(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 800(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: addq $2968, %rsp # imm = 0xB98
; AVX2-FP-NEXT: vzeroupper
12469 ; AVX2-FP-NEXT: retq
12471 ; AVX2-FCP-LABEL: store_i64_stride6_vf64:
12472 ; AVX2-FCP: # %bb.0:
12473 ; AVX2-FCP-NEXT: subq $2968, %rsp # imm = 0xB98
12474 ; AVX2-FCP-NEXT: vmovaps 32(%r8), %ymm3
12475 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12476 ; AVX2-FCP-NEXT: vmovaps (%r8), %ymm4
12477 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12478 ; AVX2-FCP-NEXT: vmovaps (%r9), %xmm0
12479 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
12480 ; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm7
12481 ; AVX2-FCP-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12482 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm5
12483 ; AVX2-FCP-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12484 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm2
12485 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12486 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm6
12487 ; AVX2-FCP-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12488 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
12489 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
12490 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
12491 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12492 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
12493 ; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm2
12494 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12495 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm1
12496 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12497 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm4
12498 ; AVX2-FCP-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12499 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
12500 ; AVX2-FCP-NEXT: vbroadcastsd 8(%r8), %ymm2
12501 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
12502 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12503 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12504 ; AVX2-FCP-NEXT: vmovaps 32(%r9), %xmm0
12505 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
12506 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm5[1]
12507 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm3[0,1],ymm2[0,1]
12508 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
12509 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12510 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm1
12511 ; AVX2-FCP-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12512 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
12513 ; AVX2-FCP-NEXT: vbroadcastsd 40(%r8), %ymm2
12514 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
12515 ; AVX2-FCP-NEXT: vmovaps 64(%r8), %ymm2
12516 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12517 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
12518 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12519 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12520 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %xmm1
12521 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12522 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm0
12523 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12524 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
12525 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
12526 ; AVX2-FCP-NEXT: vmovaps 64(%r9), %xmm1
12527 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12528 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12529 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12530 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %xmm0
12531 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12532 ; AVX2-FCP-NEXT: vmovaps 64(%rcx), %xmm2
12533 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12534 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12535 ; AVX2-FCP-NEXT: vbroadcastsd 72(%r8), %ymm2
12536 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12537 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12538 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12539 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12540 ; AVX2-FCP-NEXT: vmovaps 96(%r8), %ymm1
12541 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12542 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %xmm2
12543 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12544 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %xmm0
12545 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12546 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12547 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12548 ; AVX2-FCP-NEXT: vmovaps 96(%r9), %xmm1
12549 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12550 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12551 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12552 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %xmm2
12553 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12554 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %xmm0
12555 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12556 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12557 ; AVX2-FCP-NEXT: vbroadcastsd 104(%r8), %ymm2
12558 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12559 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12560 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12561 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12562 ; AVX2-FCP-NEXT: vmovaps 128(%r8), %ymm1
12563 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12564 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %xmm2
12565 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12566 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %xmm0
12567 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12568 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12569 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12570 ; AVX2-FCP-NEXT: vmovaps 128(%r9), %xmm1
12571 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12572 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12573 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12574 ; AVX2-FCP-NEXT: vmovaps 128(%rcx), %xmm2
12575 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12576 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %xmm0
12577 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12578 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12579 ; AVX2-FCP-NEXT: vbroadcastsd 136(%r8), %ymm2
12580 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12581 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12582 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12583 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12584 ; AVX2-FCP-NEXT: vmovaps 160(%r8), %ymm1
12585 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12586 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %xmm2
12587 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12588 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %xmm0
12589 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12590 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12591 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12592 ; AVX2-FCP-NEXT: vmovaps 160(%r9), %xmm1
12593 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12594 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12595 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12596 ; AVX2-FCP-NEXT: vmovaps 160(%rcx), %xmm2
12597 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12598 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %xmm0
12599 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12600 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12601 ; AVX2-FCP-NEXT: vbroadcastsd 168(%r8), %ymm2
12602 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12603 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12604 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12605 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12606 ; AVX2-FCP-NEXT: vmovaps 192(%r8), %ymm1
12607 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12608 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %xmm2
12609 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12610 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm0
12611 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12612 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12613 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12614 ; AVX2-FCP-NEXT: vmovaps 192(%r9), %xmm1
12615 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12616 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12617 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12618 ; AVX2-FCP-NEXT: vmovaps 192(%rcx), %xmm2
12619 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12620 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %xmm0
12621 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12622 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12623 ; AVX2-FCP-NEXT: vbroadcastsd 200(%r8), %ymm2
12624 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12625 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12626 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12627 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12628 ; AVX2-FCP-NEXT: vmovaps 224(%r8), %ymm1
12629 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12630 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %xmm2
12631 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12632 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm0
12633 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12634 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12635 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12636 ; AVX2-FCP-NEXT: vmovaps 224(%r9), %xmm1
12637 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12638 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12639 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12640 ; AVX2-FCP-NEXT: vmovaps 224(%rcx), %xmm2
12641 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12642 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %xmm0
12643 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12644 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12645 ; AVX2-FCP-NEXT: vbroadcastsd 232(%r8), %ymm2
12646 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12647 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12648 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12649 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12650 ; AVX2-FCP-NEXT: vmovaps 256(%r8), %ymm1
12651 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12652 ; AVX2-FCP-NEXT: vmovaps 256(%rsi), %xmm2
12653 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12654 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %xmm0
12655 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12656 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12657 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12658 ; AVX2-FCP-NEXT: vmovaps 256(%r9), %xmm1
12659 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12660 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12661 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12662 ; AVX2-FCP-NEXT: vmovaps 256(%rcx), %xmm2
12663 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12664 ; AVX2-FCP-NEXT: vmovaps 256(%rdx), %xmm0
12665 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12666 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12667 ; AVX2-FCP-NEXT: vbroadcastsd 264(%r8), %ymm2
12668 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12669 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12670 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12671 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12672 ; AVX2-FCP-NEXT: vmovaps 288(%r8), %ymm1
12673 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12674 ; AVX2-FCP-NEXT: vmovaps 288(%rsi), %xmm2
12675 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12676 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %xmm0
12677 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12678 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12679 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12680 ; AVX2-FCP-NEXT: vmovaps 288(%r9), %xmm1
12681 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12682 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12683 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12684 ; AVX2-FCP-NEXT: vmovaps 288(%rcx), %xmm2
12685 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12686 ; AVX2-FCP-NEXT: vmovaps 288(%rdx), %xmm0
12687 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12688 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12689 ; AVX2-FCP-NEXT: vbroadcastsd 296(%r8), %ymm2
12690 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12691 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12692 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12693 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12694 ; AVX2-FCP-NEXT: vmovaps 320(%r8), %ymm1
12695 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12696 ; AVX2-FCP-NEXT: vmovaps 320(%rsi), %xmm2
12697 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12698 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %xmm0
12699 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12700 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12701 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12702 ; AVX2-FCP-NEXT: vmovaps 320(%r9), %xmm1
12703 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12704 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12705 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12706 ; AVX2-FCP-NEXT: vmovaps 320(%rcx), %xmm2
12707 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12708 ; AVX2-FCP-NEXT: vmovaps 320(%rdx), %xmm0
12709 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12710 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12711 ; AVX2-FCP-NEXT: vbroadcastsd 328(%r8), %ymm2
12712 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12713 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12714 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12715 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12716 ; AVX2-FCP-NEXT: vmovaps 352(%r8), %ymm1
12717 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12718 ; AVX2-FCP-NEXT: vmovaps 352(%rsi), %xmm2
12719 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12720 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %xmm0
12721 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12722 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12723 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12724 ; AVX2-FCP-NEXT: vmovaps 352(%r9), %xmm1
12725 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12726 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12727 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12728 ; AVX2-FCP-NEXT: vmovaps 352(%rcx), %xmm2
12729 ; AVX2-FCP-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12730 ; AVX2-FCP-NEXT: vmovaps 352(%rdx), %xmm0
12731 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12732 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12733 ; AVX2-FCP-NEXT: vbroadcastsd 360(%r8), %ymm2
12734 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12735 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12736 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12737 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12738 ; AVX2-FCP-NEXT: vmovaps 384(%r8), %ymm1
12739 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12740 ; AVX2-FCP-NEXT: vmovaps 384(%rsi), %xmm2
12741 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12742 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %xmm0
12743 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12744 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12745 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12746 ; AVX2-FCP-NEXT: vmovaps 384(%r9), %xmm1
12747 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12748 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12749 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12750 ; AVX2-FCP-NEXT: vmovaps 384(%rcx), %xmm0
12751 ; AVX2-FCP-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12752 ; AVX2-FCP-NEXT: vmovaps 384(%rdx), %xmm15
12753 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
12754 ; AVX2-FCP-NEXT: vbroadcastsd 392(%r8), %ymm2
12755 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12756 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12757 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12758 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12759 ; AVX2-FCP-NEXT: vmovaps 416(%r8), %ymm1
12760 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12761 ; AVX2-FCP-NEXT: vmovaps 416(%rsi), %xmm12
12762 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %xmm13
12763 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
12764 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12765 ; AVX2-FCP-NEXT: vmovaps 416(%r9), %xmm1
12766 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12767 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12768 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12769 ; AVX2-FCP-NEXT: vmovaps 416(%rcx), %xmm10
12770 ; AVX2-FCP-NEXT: vmovaps 416(%rdx), %xmm11
12771 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
12772 ; AVX2-FCP-NEXT: vbroadcastsd 424(%r8), %ymm2
12773 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12774 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12775 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12776 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12777 ; AVX2-FCP-NEXT: vmovaps 448(%r8), %ymm1
12778 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12779 ; AVX2-FCP-NEXT: vmovaps 448(%rsi), %xmm8
12780 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %xmm9
12781 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
12782 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
12783 ; AVX2-FCP-NEXT: vmovaps 448(%r9), %xmm1
12784 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
12785 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
12786 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12787 ; AVX2-FCP-NEXT: vmovaps 448(%rcx), %xmm6
12788 ; AVX2-FCP-NEXT: vmovaps 448(%rdx), %xmm7
12789 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
12790 ; AVX2-FCP-NEXT: vbroadcastsd 456(%r8), %ymm2
12791 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
12792 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
12793 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12794 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12795 ; AVX2-FCP-NEXT: vmovaps 480(%r8), %ymm1
12796 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12797 ; AVX2-FCP-NEXT: vmovaps 480(%rsi), %xmm4
12798 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %xmm5
12799 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
12800 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
12801 ; AVX2-FCP-NEXT: vmovaps 480(%r9), %xmm0
12802 ; AVX2-FCP-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
12803 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
12804 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12805 ; AVX2-FCP-NEXT: vmovaps 480(%rcx), %xmm2
12806 ; AVX2-FCP-NEXT: vmovaps 480(%rdx), %xmm3
12807 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
12808 ; AVX2-FCP-NEXT: vbroadcastsd 488(%r8), %ymm14
12809 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
12810 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
12811 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12812 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12813 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12814 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12815 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12816 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12817 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12818 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12819 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12820 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12821 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12822 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12823 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12824 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12825 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12826 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12827 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12828 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12829 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12830 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12831 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12832 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12833 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12834 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12835 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12836 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12837 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12838 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12839 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12840 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12841 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12842 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12843 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12844 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12845 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12846 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12847 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12848 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12849 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12850 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12851 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12852 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12853 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12854 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12855 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12856 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12857 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12858 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12859 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12860 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12861 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12862 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12863 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12864 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12865 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12866 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12867 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12868 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12869 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12870 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12871 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12872 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12873 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12874 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12875 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12876 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12877 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12878 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12879 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12880 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12881 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12882 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
12883 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12884 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12885 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12886 ; AVX2-FCP-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12887 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12888 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
12889 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12890 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12891 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
12892 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
12893 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12894 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12895 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
12896 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
12897 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12898 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12899 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
12900 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
12901 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12902 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12903 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm0
12904 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm1
12905 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm2
12906 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12907 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12908 ; AVX2-FCP-NEXT: vbroadcastsd 16(%rcx), %ymm4
12909 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12910 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12911 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12912 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12913 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
12914 ; AVX2-FCP-NEXT: vbroadcastsd 16(%r9), %ymm1
12915 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
12916 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12917 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
12918 ; AVX2-FCP-NEXT: vbroadcastsd 24(%r8), %ymm1
12919 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
12920 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm0
12921 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm1
12922 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm2
12923 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12924 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12925 ; AVX2-FCP-NEXT: vbroadcastsd 48(%rcx), %ymm4
12926 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12927 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12928 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12929 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12930 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
12931 ; AVX2-FCP-NEXT: vbroadcastsd 48(%r9), %ymm1
12932 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
12933 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12934 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
12935 ; AVX2-FCP-NEXT: vbroadcastsd 56(%r8), %ymm1
12936 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
12937 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %ymm0
12938 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %ymm1
12939 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %ymm2
12940 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12941 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12942 ; AVX2-FCP-NEXT: vbroadcastsd 80(%rcx), %ymm4
12943 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12944 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12945 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12946 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12947 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
12948 ; AVX2-FCP-NEXT: vbroadcastsd 80(%r9), %ymm1
12949 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
12950 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12951 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
12952 ; AVX2-FCP-NEXT: vbroadcastsd 88(%r8), %ymm1
12953 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm1[2,3]
12954 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm0
12955 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %ymm1
12956 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %ymm2
12957 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12958 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12959 ; AVX2-FCP-NEXT: vbroadcastsd 112(%rcx), %ymm4
12960 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12961 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12962 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12963 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12964 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
12965 ; AVX2-FCP-NEXT: vbroadcastsd 112(%r9), %ymm1
12966 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
12967 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12968 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
12969 ; AVX2-FCP-NEXT: vbroadcastsd 120(%r8), %ymm1
12970 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
12971 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12972 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm0
12973 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %ymm1
12974 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %ymm2
12975 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12976 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12977 ; AVX2-FCP-NEXT: vbroadcastsd 144(%rcx), %ymm4
12978 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12979 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12980 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12981 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12982 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
12983 ; AVX2-FCP-NEXT: vbroadcastsd 144(%r9), %ymm1
12984 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
12985 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12986 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
12987 ; AVX2-FCP-NEXT: vbroadcastsd 152(%r8), %ymm1
12988 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
12989 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12990 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm0
12991 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %ymm1
12992 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %ymm2
12993 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12994 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
12995 ; AVX2-FCP-NEXT: vbroadcastsd 176(%rcx), %ymm4
12996 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
12997 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12998 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12999 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13000 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13001 ; AVX2-FCP-NEXT: vbroadcastsd 176(%r9), %ymm1
13002 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13003 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13004 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13005 ; AVX2-FCP-NEXT: vbroadcastsd 184(%r8), %ymm1
13006 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13007 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13008 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %ymm0
13009 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %ymm1
13010 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %ymm2
13011 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13012 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13013 ; AVX2-FCP-NEXT: vbroadcastsd 208(%rcx), %ymm4
13014 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13015 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13016 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13017 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13018 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13019 ; AVX2-FCP-NEXT: vbroadcastsd 208(%r9), %ymm1
13020 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13021 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13022 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13023 ; AVX2-FCP-NEXT: vbroadcastsd 216(%r8), %ymm1
13024 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13025 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13026 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %ymm0
13027 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %ymm1
13028 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %ymm2
13029 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13030 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13031 ; AVX2-FCP-NEXT: vbroadcastsd 240(%rcx), %ymm4
13032 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13033 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13034 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13035 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13036 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13037 ; AVX2-FCP-NEXT: vbroadcastsd 240(%r9), %ymm1
13038 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13039 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13040 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13041 ; AVX2-FCP-NEXT: vbroadcastsd 248(%r8), %ymm1
13042 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13043 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13044 ; AVX2-FCP-NEXT: vmovaps 256(%rdi), %ymm0
13045 ; AVX2-FCP-NEXT: vmovaps 256(%rsi), %ymm1
13046 ; AVX2-FCP-NEXT: vmovaps 256(%rdx), %ymm2
13047 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13048 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13049 ; AVX2-FCP-NEXT: vbroadcastsd 272(%rcx), %ymm4
13050 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13051 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13052 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13053 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13054 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13055 ; AVX2-FCP-NEXT: vbroadcastsd 272(%r9), %ymm1
13056 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13057 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13058 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13059 ; AVX2-FCP-NEXT: vbroadcastsd 280(%r8), %ymm1
13060 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13061 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13062 ; AVX2-FCP-NEXT: vmovaps 288(%rdi), %ymm0
13063 ; AVX2-FCP-NEXT: vmovaps 288(%rsi), %ymm1
13064 ; AVX2-FCP-NEXT: vmovaps 288(%rdx), %ymm2
13065 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13066 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13067 ; AVX2-FCP-NEXT: vbroadcastsd 304(%rcx), %ymm4
13068 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13069 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13070 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13071 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13072 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13073 ; AVX2-FCP-NEXT: vbroadcastsd 304(%r9), %ymm1
13074 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13075 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13076 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13077 ; AVX2-FCP-NEXT: vbroadcastsd 312(%r8), %ymm1
13078 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13079 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13080 ; AVX2-FCP-NEXT: vmovaps 320(%rdi), %ymm0
13081 ; AVX2-FCP-NEXT: vmovaps 320(%rsi), %ymm1
13082 ; AVX2-FCP-NEXT: vmovaps 320(%rdx), %ymm2
13083 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13084 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13085 ; AVX2-FCP-NEXT: vbroadcastsd 336(%rcx), %ymm4
13086 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13087 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13088 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13089 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13090 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13091 ; AVX2-FCP-NEXT: vbroadcastsd 336(%r9), %ymm1
13092 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13093 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13094 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13095 ; AVX2-FCP-NEXT: vbroadcastsd 344(%r8), %ymm1
13096 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13097 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13098 ; AVX2-FCP-NEXT: vmovaps 352(%rdi), %ymm0
13099 ; AVX2-FCP-NEXT: vmovaps 352(%rsi), %ymm1
13100 ; AVX2-FCP-NEXT: vmovaps 352(%rdx), %ymm2
13101 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13102 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13103 ; AVX2-FCP-NEXT: vbroadcastsd 368(%rcx), %ymm4
13104 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13105 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13106 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13107 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13108 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13109 ; AVX2-FCP-NEXT: vbroadcastsd 368(%r9), %ymm1
13110 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13111 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13112 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13113 ; AVX2-FCP-NEXT: vbroadcastsd 376(%r8), %ymm1
13114 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
13115 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13116 ; AVX2-FCP-NEXT: vmovaps 384(%rdi), %ymm0
13117 ; AVX2-FCP-NEXT: vmovaps 384(%rsi), %ymm1
13118 ; AVX2-FCP-NEXT: vmovaps 384(%rdx), %ymm2
13119 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13120 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13121 ; AVX2-FCP-NEXT: vbroadcastsd 400(%rcx), %ymm4
13122 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13123 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13124 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13125 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13126 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13127 ; AVX2-FCP-NEXT: vbroadcastsd 400(%r9), %ymm1
13128 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13129 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13130 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13131 ; AVX2-FCP-NEXT: vbroadcastsd 408(%r8), %ymm1
13132 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm15 = ymm0[2,3],ymm1[2,3]
13133 ; AVX2-FCP-NEXT: vmovaps 416(%rdi), %ymm0
13134 ; AVX2-FCP-NEXT: vmovaps 416(%rsi), %ymm1
13135 ; AVX2-FCP-NEXT: vmovaps 416(%rdx), %ymm2
13136 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13137 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
13138 ; AVX2-FCP-NEXT: vbroadcastsd 432(%rcx), %ymm4
13139 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
13140 ; AVX2-FCP-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13141 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13142 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13143 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13144 ; AVX2-FCP-NEXT: vbroadcastsd 432(%r9), %ymm1
13145 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13146 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13147 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13148 ; AVX2-FCP-NEXT: vbroadcastsd 440(%r8), %ymm1
13149 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[2,3],ymm1[2,3]
13150 ; AVX2-FCP-NEXT: vmovaps 448(%rdi), %ymm0
13151 ; AVX2-FCP-NEXT: vmovaps 448(%rsi), %ymm1
13152 ; AVX2-FCP-NEXT: vmovaps 448(%rdx), %ymm2
13153 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
13154 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
13155 ; AVX2-FCP-NEXT: vbroadcastsd 464(%rcx), %ymm5
13156 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5],ymm5[6,7]
13157 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
13158 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13159 ; AVX2-FCP-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
13160 ; AVX2-FCP-NEXT: vbroadcastsd 464(%r9), %ymm1
13161 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
13162 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
13163 ; AVX2-FCP-NEXT: vbroadcastsd 472(%r8), %ymm2
13164 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm2[2,3]
13165 ; AVX2-FCP-NEXT: vmovaps 480(%rdi), %ymm1
13166 ; AVX2-FCP-NEXT: vmovaps 480(%rsi), %ymm2
13167 ; AVX2-FCP-NEXT: vmovaps 480(%rdx), %ymm4
13168 ; AVX2-FCP-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
13169 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],ymm4[2,3]
13170 ; AVX2-FCP-NEXT: vbroadcastsd 496(%rcx), %ymm8
13171 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm8[6,7]
13172 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
13173 ; AVX2-FCP-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13174 ; AVX2-FCP-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
13175 ; AVX2-FCP-NEXT: vbroadcastsd 496(%r9), %ymm2
13176 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
13177 ; AVX2-FCP-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
13178 ; AVX2-FCP-NEXT: vbroadcastsd 504(%r8), %ymm2
13179 ; AVX2-FCP-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[2,3],ymm2[2,3]
13180 ; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
13181 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm12[0,1,2,3,4,5],mem[6,7]
13182 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13183 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5],mem[6,7]
13184 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13185 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],mem[6,7]
13186 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13187 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13188 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
13189 ; AVX2-FCP-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13190 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13191 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3,4,5],mem[6,7]
13192 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13193 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm1[0,1,2,3,4,5],mem[6,7]
13194 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13195 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm12 = ymm1[0,1,2,3,4,5],mem[6,7]
13196 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13197 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3,4,5],mem[6,7]
13198 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13199 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm1[0,1,2,3,4,5],mem[6,7]
13200 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13201 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3,4,5],mem[6,7]
13202 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13203 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],mem[6,7]
13204 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13205 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
13206 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
13207 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
13208 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
13209 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
13210 ; AVX2-FCP-NEXT: vmovaps %ymm0, 3040(%rax)
13211 ; AVX2-FCP-NEXT: vmovaps %ymm3, 3008(%rax)
13212 ; AVX2-FCP-NEXT: vmovaps %ymm5, 2976(%rax)
13213 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13214 ; AVX2-FCP-NEXT: vmovaps %ymm0, 2880(%rax)
13215 ; AVX2-FCP-NEXT: vmovaps %ymm6, 2848(%rax)
13216 ; AVX2-FCP-NEXT: vmovaps %ymm9, 2816(%rax)
13217 ; AVX2-FCP-NEXT: vmovaps %ymm11, 2784(%rax)
13218 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13219 ; AVX2-FCP-NEXT: vmovaps %ymm0, 2688(%rax)
13220 ; AVX2-FCP-NEXT: vmovaps %ymm10, 2656(%rax)
13221 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13222 ; AVX2-FCP-NEXT: vmovaps %ymm0, 2624(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2592(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2496(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm15, 2464(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2432(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2400(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2304(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm1, 2272(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2240(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2208(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2112(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm2, 2080(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2048(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2016(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1920(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm4, 1888(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1856(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1824(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1728(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm7, 1696(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1664(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1632(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1536(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm8, 1504(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1472(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1344(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm12, 1312(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1280(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1152(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm13, 1120(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1088(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 960(%rax)
; AVX2-FCP-NEXT: vmovaps %ymm14, 928(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 896(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 864(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 768(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 736(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 704(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 672(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 544(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 512(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 480(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 352(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 288(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, (%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2944(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2912(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2752(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2720(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2560(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2528(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2368(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2336(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2176(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 2144(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1984(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1952(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1792(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1760(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1600(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1568(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 992(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 832(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 800(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FCP-NEXT: addq $2968, %rsp # imm = 0xB98
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf64:
; AVX512: # %bb.0:
; AVX512-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: movb $12, %al
; AVX512-NEXT: kmovw %eax, %k1
; AVX512-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512-NEXT: movb $48, %al
; AVX512-NEXT: kmovw %eax, %k2
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa (%rdi), %ymm1
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512-NEXT: movb $16, %al
; AVX512-NEXT: kmovw %eax, %k2
; AVX512-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa (%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
; AVX512-NEXT: vmovdqa 384(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
; AVX512-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
; AVX512-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
; AVX512-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
; AVX512-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
; AVX512-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
; AVX512-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
; AVX512-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
; AVX512-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
; AVX512-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
; AVX512-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
; AVX512-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
; AVX512-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
; AVX512-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
; AVX512-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
; AVX512-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
; AVX512-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
; AVX512-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
; AVX512-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
; AVX512-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
; AVX512-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
; AVX512-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm1, 3008(%rax)
; AVX512-NEXT: vmovdqa64 %zmm28, 2944(%rax)
; AVX512-NEXT: vmovdqa64 %zmm0, 2880(%rax)
; AVX512-NEXT: vmovdqa64 %zmm16, 2816(%rax)
; AVX512-NEXT: vmovdqa64 %zmm27, 2752(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 2624(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 2560(%rax)
; AVX512-NEXT: vmovdqa64 %zmm2, 2496(%rax)
; AVX512-NEXT: vmovdqa64 %zmm19, 2432(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 2240(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 2176(%rax)
; AVX512-NEXT: vmovdqa64 %zmm3, 2112(%rax)
; AVX512-NEXT: vmovdqa64 %zmm25, 2048(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, 1728(%rax)
; AVX512-NEXT: vmovdqa64 %zmm26, 1664(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1600(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512-NEXT: vmovdqa64 %zmm29, 1344(%rax)
; AVX512-NEXT: vmovdqa64 %zmm30, 1280(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512-NEXT: vmovdqa64 %zmm6, 960(%rax)
; AVX512-NEXT: vmovdqa64 %zmm31, 896(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512-NEXT: vmovdqa64 %zmm12, 192(%rax)
; AVX512-NEXT: vmovdqa64 %zmm13, 128(%rax)
; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm14, 2688(%rax)
; AVX512-NEXT: vmovdqa64 %zmm15, 2304(%rax)
; AVX512-NEXT: vmovdqa64 %zmm17, 1920(%rax)
; AVX512-NEXT: vmovdqa64 %zmm18, 1536(%rax)
; AVX512-NEXT: vmovdqa64 %zmm20, 1152(%rax)
; AVX512-NEXT: vmovdqa64 %zmm21, 768(%rax)
; AVX512-NEXT: vmovdqa64 %zmm23, 384(%rax)
; AVX512-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512-NEXT: addq $3720, %rsp # imm = 0xE88
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i64_stride6_vf64:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512-FCP-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512-FCP-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512-FCP-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512-FCP-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512-FCP-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512-FCP-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512-FCP-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512-FCP-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512-FCP-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512-FCP-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512-FCP-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512-FCP-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512-FCP-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512-FCP-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512-FCP-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512-FCP-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512-FCP-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
14279 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm3
14280 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
14281 ; AVX512-FCP-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
14282 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
14283 ; AVX512-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
14284 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
14285 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
14286 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14287 ; AVX512-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
14288 ; AVX512-FCP-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
14289 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
14290 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
14291 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14292 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
14293 ; AVX512-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14294 ; AVX512-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
14295 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14296 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
14297 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
14298 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14299 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
14300 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
14301 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
14302 ; AVX512-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14303 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm8
14304 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
14305 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14306 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm11
14307 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
14308 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, %zmm30
14309 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
14310 ; AVX512-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14311 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm9
14312 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
14313 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm8
14314 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
14315 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
14316 ; AVX512-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14317 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
14318 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
14319 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14320 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
14321 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, %zmm14
14322 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
14323 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
14324 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
14325 ; AVX512-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14326 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm6
14327 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
14328 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
14329 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm26
14330 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
14331 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
14332 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm25
14333 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
14334 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14335 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm6
14336 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
14337 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
14338 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm10
14339 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
14340 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
14341 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm23
14342 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
14343 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14344 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
14345 ; AVX512-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
14346 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
14347 ; AVX512-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14348 ; AVX512-FCP-NEXT: movb $12, %al
14349 ; AVX512-FCP-NEXT: kmovw %eax, %k1
14350 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
14351 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
14352 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
14353 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
14354 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14355 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
14356 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
14357 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
14358 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
14359 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
14360 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
14361 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
14362 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
14363 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
14364 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
14365 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
14366 ; AVX512-FCP-NEXT: movb $48, %al
14367 ; AVX512-FCP-NEXT: kmovw %eax, %k2
14368 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14369 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14370 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
14371 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14372 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14373 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
14374 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14375 ; AVX512-FCP-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
14376 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14377 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14378 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
14379 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14380 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
14381 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14382 ; AVX512-FCP-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
14383 ; AVX512-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
14384 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
14385 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
14386 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14387 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
14388 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14389 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
14390 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14391 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
14392 ; AVX512-FCP-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
14393 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14394 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
14395 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14396 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
14397 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14398 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
14399 ; AVX512-FCP-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
14400 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14401 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14402 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
14403 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14404 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
14405 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14406 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
14407 ; AVX512-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
14408 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14409 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
14410 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14411 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
14412 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14413 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
14414 ; AVX512-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
14415 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14416 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14417 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
14418 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14419 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
14420 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14421 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
14422 ; AVX512-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
14423 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14424 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14425 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
14426 ; AVX512-FCP-NEXT: vmovdqa64 (%r8), %zmm29
14427 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
14428 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
14429 ; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14430 ; AVX512-FCP-NEXT: vmovdqa64 64(%r8), %zmm2
14431 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
14432 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14433 ; AVX512-FCP-NEXT: vmovdqa64 128(%r8), %zmm0
14434 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
14435 ; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14436 ; AVX512-FCP-NEXT: vmovdqa64 192(%r8), %zmm4
14437 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
14438 ; AVX512-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14439 ; AVX512-FCP-NEXT: vmovdqa64 256(%r8), %zmm6
14440 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
14441 ; AVX512-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14442 ; AVX512-FCP-NEXT: vmovdqa64 320(%r8), %zmm7
14443 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
14444 ; AVX512-FCP-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14445 ; AVX512-FCP-NEXT: vmovdqa64 384(%r8), %zmm9
14446 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
14447 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm24
14448 ; AVX512-FCP-NEXT: vmovdqa64 448(%r8), %zmm10
14449 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
14450 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14451 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
14452 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
14453 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
14454 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14455 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
14456 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14457 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
14458 ; AVX512-FCP-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14459 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
14460 ; AVX512-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14461 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
14462 ; AVX512-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14463 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
14464 ; AVX512-FCP-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14465 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
14466 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
14467 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14468 ; AVX512-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
14469 ; AVX512-FCP-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
14470 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
14471 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
14472 ; AVX512-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14473 ; AVX512-FCP-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
14474 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
14475 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
14476 ; AVX512-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14477 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
14478 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
14479 ; AVX512-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14480 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14481 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
14482 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
14483 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
14484 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
14485 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14486 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm1
14487 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
14488 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14489 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
14490 ; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %ymm1
14491 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
14492 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14493 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
14494 ; AVX512-FCP-NEXT: movb $16, %al
14495 ; AVX512-FCP-NEXT: kmovw %eax, %k2
14496 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
14497 ; AVX512-FCP-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14498 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
14499 ; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
14500 ; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14501 ; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm2
14502 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14503 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14504 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
14505 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
14506 ; AVX512-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14507 ; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
14508 ; AVX512-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
14509 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14510 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14511 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
14512 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
14513 ; AVX512-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14514 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
14515 ; AVX512-FCP-NEXT: vmovdqa 256(%rdi), %ymm2
14516 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14517 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14518 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
14519 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
14520 ; AVX512-FCP-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14521 ; AVX512-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
14522 ; AVX512-FCP-NEXT: vmovdqa 320(%rdi), %ymm2
14523 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14524 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14525 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
14526 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
14527 ; AVX512-FCP-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14528 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
14529 ; AVX512-FCP-NEXT: vmovdqa 384(%rdi), %ymm2
14530 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14531 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14532 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
14533 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
14534 ; AVX512-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14535 ; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
14536 ; AVX512-FCP-NEXT: vmovdqa 448(%rdi), %ymm2
14537 ; AVX512-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
14538 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14539 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
14540 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
14541 ; AVX512-FCP-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14542 ; AVX512-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
14543 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
14544 ; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14545 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
14546 ; AVX512-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14547 ; AVX512-FCP-NEXT: vmovdqa64 (%r9), %zmm29
14548 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
14549 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14550 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
14551 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14552 ; AVX512-FCP-NEXT: vmovdqa64 64(%r9), %zmm3
14553 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14554 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
14555 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14556 ; AVX512-FCP-NEXT: vmovdqa64 128(%r9), %zmm4
14557 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14558 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
14559 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14560 ; AVX512-FCP-NEXT: vmovdqa64 192(%r9), %zmm5
14561 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14562 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
14563 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14564 ; AVX512-FCP-NEXT: vmovdqa64 256(%r9), %zmm11
14565 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14566 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
14567 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14568 ; AVX512-FCP-NEXT: vmovdqa64 320(%r9), %zmm8
14569 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14570 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
14571 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14572 ; AVX512-FCP-NEXT: vmovdqa64 384(%r9), %zmm7
14573 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
14574 ; AVX512-FCP-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14575 ; AVX512-FCP-NEXT: vmovdqa64 448(%r9), %zmm22
14576 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
14577 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
14578 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14579 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
14580 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14581 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14582 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
14583 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14584 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14585 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
14586 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14587 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14588 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
14589 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14590 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14591 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
14592 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14593 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14594 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
14595 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14596 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
14597 ; AVX512-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14598 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
14599 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
14600 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14601 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
14602 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14603 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
14604 ; AVX512-FCP-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
14605 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14606 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
14607 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14608 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
14609 ; AVX512-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14610 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14611 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
14612 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14613 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
14614 ; AVX512-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14615 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
14616 ; AVX512-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14617 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14618 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
14619 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14620 ; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm6
14621 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14622 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14623 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14624 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
14625 ; AVX512-FCP-NEXT: vmovdqa 64(%rdx), %xmm6
14626 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14627 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14628 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
14629 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
14630 ; AVX512-FCP-NEXT: vmovdqa 128(%rdx), %xmm6
14631 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14632 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14633 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
14634 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
14635 ; AVX512-FCP-NEXT: vmovdqa 192(%rdx), %xmm6
14636 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14637 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14638 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14639 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
14640 ; AVX512-FCP-NEXT: vmovdqa 256(%rdx), %xmm6
14641 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14642 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14643 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14644 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
14645 ; AVX512-FCP-NEXT: vmovdqa 320(%rdx), %xmm6
14646 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14647 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14648 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14649 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
14650 ; AVX512-FCP-NEXT: vmovdqa 384(%rdx), %xmm6
14651 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14652 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14653 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14654 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
14655 ; AVX512-FCP-NEXT: vmovdqa 448(%rdx), %xmm6
14656 ; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
14657 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
14658 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14659 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
14660 ; AVX512-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
14661 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
14662 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
14663 ; AVX512-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
14664 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
14665 ; AVX512-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
14666 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
14667 ; AVX512-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
14668 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
14669 ; AVX512-FCP-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
14670 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
14671 ; AVX512-FCP-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
14672 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
14673 ; AVX512-FCP-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
14674 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
14675 ; AVX512-FCP-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
14676 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
14677 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
14678 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
14679 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
14680 ; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
14681 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14682 ; AVX512-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
14683 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14684 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
14685 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14686 ; AVX512-FCP-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
14687 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
14688 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
14689 ; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
14690 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
14691 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
14692 ; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
14693 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
14694 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14695 ; AVX512-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
14696 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
14697 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14698 ; AVX512-FCP-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
14699 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
14700 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14701 ; AVX512-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
14702 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
14703 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14704 ; AVX512-FCP-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
14705 ; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
14706 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14707 ; AVX512-FCP-NEXT: vmovaps %zmm1, 3008(%rax)
14708 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, 2944(%rax)
14709 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 2880(%rax)
14710 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 2816(%rax)
14711 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, 2752(%rax)
14712 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14713 ; AVX512-FCP-NEXT: vmovaps %zmm0, 2624(%rax)
14714 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14715 ; AVX512-FCP-NEXT: vmovaps %zmm0, 2560(%rax)
14716 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 2496(%rax)
14717 ; AVX512-FCP-NEXT: vmovdqa64 %zmm19, 2432(%rax)
14718 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14719 ; AVX512-FCP-NEXT: vmovaps %zmm0, 2368(%rax)
14720 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14721 ; AVX512-FCP-NEXT: vmovaps %zmm0, 2240(%rax)
14722 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14723 ; AVX512-FCP-NEXT: vmovaps %zmm0, 2176(%rax)
14724 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 2112(%rax)
14725 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, 2048(%rax)
14726 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14727 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1984(%rax)
14728 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14729 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1856(%rax)
14730 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14731 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1792(%rax)
14732 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 1728(%rax)
14733 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, 1664(%rax)
14734 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14735 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1600(%rax)
14736 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14737 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1472(%rax)
14738 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14739 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1408(%rax)
14740 ; AVX512-FCP-NEXT: vmovdqa64 %zmm29, 1344(%rax)
14741 ; AVX512-FCP-NEXT: vmovdqa64 %zmm30, 1280(%rax)
14742 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14743 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1216(%rax)
14744 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14745 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1088(%rax)
14746 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14747 ; AVX512-FCP-NEXT: vmovaps %zmm0, 1024(%rax)
14748 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 960(%rax)
14749 ; AVX512-FCP-NEXT: vmovdqa64 %zmm31, 896(%rax)
14750 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14751 ; AVX512-FCP-NEXT: vmovaps %zmm0, 832(%rax)
14752 ; AVX512-FCP-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
14753 ; AVX512-FCP-NEXT: vmovaps %zmm0, 704(%rax)
14754 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14755 ; AVX512-FCP-NEXT: vmovaps %zmm0, 640(%rax)
14756 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
14757 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 512(%rax)
14758 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14759 ; AVX512-FCP-NEXT: vmovaps %zmm0, 448(%rax)
14760 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14761 ; AVX512-FCP-NEXT: vmovaps %zmm0, 320(%rax)
14762 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14763 ; AVX512-FCP-NEXT: vmovaps %zmm0, 256(%rax)
14764 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, 192(%rax)
14765 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 128(%rax)
14766 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14767 ; AVX512-FCP-NEXT: vmovaps %zmm0, 64(%rax)
14768 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, 2688(%rax)
14769 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 2304(%rax)
14770 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 1920(%rax)
14771 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, 1536(%rax)
14772 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, 1152(%rax)
14773 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, 768(%rax)
14774 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, 384(%rax)
14775 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, (%rax)
14776 ; AVX512-FCP-NEXT: addq $3720, %rsp # imm = 0xE88
14777 ; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i64_stride6_vf64:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512DQ-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512DQ-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512DQ-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512DQ-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512DQ-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512DQ-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512DQ-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512DQ-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512DQ-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512DQ-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512DQ-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512DQ-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512DQ-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512DQ-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512DQ-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512DQ-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512DQ-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512DQ-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512DQ-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512DQ-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512DQ-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512DQ-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512DQ-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512DQ-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512DQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512DQ-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512DQ-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512DQ-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512DQ-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512DQ-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512DQ-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512DQ-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512DQ-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512DQ-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512DQ-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: movb $12, %al
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512DQ-NEXT: movb $48, %al
; AVX512DQ-NEXT: kmovw %eax, %k2
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512DQ-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512DQ-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512DQ-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512DQ-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512DQ-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512DQ-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512DQ-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512DQ-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512DQ-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512DQ-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512DQ-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
15170 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
15171 ; AVX512DQ-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15172 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
15173 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
15174 ; AVX512DQ-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15175 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
15176 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
15177 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
15178 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
15179 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
15180 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15181 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm1
15182 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
15183 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
15184 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
15185 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %ymm1
15186 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
15187 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15188 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
15189 ; AVX512DQ-NEXT: movb $16, %al
15190 ; AVX512DQ-NEXT: kmovw %eax, %k2
15191 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
15192 ; AVX512DQ-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15193 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
15194 ; AVX512DQ-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
15195 ; AVX512DQ-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15196 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %ymm2
15197 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15198 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15199 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
15200 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
15201 ; AVX512DQ-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15202 ; AVX512DQ-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
15203 ; AVX512DQ-NEXT: vmovdqa 192(%rdi), %ymm2
15204 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15205 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15206 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
15207 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
15208 ; AVX512DQ-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15209 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
15210 ; AVX512DQ-NEXT: vmovdqa 256(%rdi), %ymm2
15211 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15212 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15213 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
15214 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
15215 ; AVX512DQ-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15216 ; AVX512DQ-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
15217 ; AVX512DQ-NEXT: vmovdqa 320(%rdi), %ymm2
15218 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15219 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15220 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
15221 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
15222 ; AVX512DQ-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15223 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
15224 ; AVX512DQ-NEXT: vmovdqa 384(%rdi), %ymm2
15225 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15226 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15227 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
15228 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
15229 ; AVX512DQ-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15230 ; AVX512DQ-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
15231 ; AVX512DQ-NEXT: vmovdqa 448(%rdi), %ymm2
15232 ; AVX512DQ-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
15233 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15234 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
15235 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
15236 ; AVX512DQ-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15237 ; AVX512DQ-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
15238 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
15239 ; AVX512DQ-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15240 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
15241 ; AVX512DQ-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15242 ; AVX512DQ-NEXT: vmovdqa64 (%r9), %zmm29
15243 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
15244 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15245 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
15246 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15247 ; AVX512DQ-NEXT: vmovdqa64 64(%r9), %zmm3
15248 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15249 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
15250 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15251 ; AVX512DQ-NEXT: vmovdqa64 128(%r9), %zmm4
15252 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15253 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
15254 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15255 ; AVX512DQ-NEXT: vmovdqa64 192(%r9), %zmm5
15256 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15257 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
15258 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15259 ; AVX512DQ-NEXT: vmovdqa64 256(%r9), %zmm11
15260 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15261 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
15262 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15263 ; AVX512DQ-NEXT: vmovdqa64 320(%r9), %zmm8
15264 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15265 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
15266 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15267 ; AVX512DQ-NEXT: vmovdqa64 384(%r9), %zmm7
15268 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
15269 ; AVX512DQ-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15270 ; AVX512DQ-NEXT: vmovdqa64 448(%r9), %zmm22
15271 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
15272 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
15273 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15274 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
15275 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15276 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15277 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
15278 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15279 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15280 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
15281 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15282 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15283 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
15284 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15285 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15286 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
15287 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15288 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15289 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
15290 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15291 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
15292 ; AVX512DQ-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15293 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
15294 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
15295 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15296 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
15297 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15298 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
15299 ; AVX512DQ-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
15300 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15301 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
15302 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15303 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
15304 ; AVX512DQ-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15305 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15306 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
15307 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15308 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
15309 ; AVX512DQ-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15310 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
15311 ; AVX512DQ-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15312 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15313 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
15314 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15315 ; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm6
15316 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15317 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15318 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15319 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
15320 ; AVX512DQ-NEXT: vmovdqa 64(%rdx), %xmm6
15321 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15322 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15323 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
15324 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
15325 ; AVX512DQ-NEXT: vmovdqa 128(%rdx), %xmm6
15326 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15327 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15328 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
15329 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
15330 ; AVX512DQ-NEXT: vmovdqa 192(%rdx), %xmm6
15331 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15332 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15333 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
15334 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
15335 ; AVX512DQ-NEXT: vmovdqa 256(%rdx), %xmm6
15336 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15337 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15338 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
15339 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
15340 ; AVX512DQ-NEXT: vmovdqa 320(%rdx), %xmm6
15341 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15342 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15343 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15344 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
15345 ; AVX512DQ-NEXT: vmovdqa 384(%rdx), %xmm6
15346 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15347 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15348 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
15349 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
15350 ; AVX512DQ-NEXT: vmovdqa 448(%rdx), %xmm6
15351 ; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
15352 ; AVX512DQ-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
15353 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15354 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
15355 ; AVX512DQ-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
15356 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
15357 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
15358 ; AVX512DQ-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
15359 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
15360 ; AVX512DQ-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
15361 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
15362 ; AVX512DQ-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
15363 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
15364 ; AVX512DQ-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
15365 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
15366 ; AVX512DQ-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
15367 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
15368 ; AVX512DQ-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
15369 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
15370 ; AVX512DQ-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
15371 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
15372 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
15373 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
15374 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
15375 ; AVX512DQ-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
15376 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
15377 ; AVX512DQ-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
15378 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
15379 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
15380 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
15381 ; AVX512DQ-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
15382 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
15383 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
15384 ; AVX512DQ-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
15385 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
15386 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
15387 ; AVX512DQ-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
15388 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
15389 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
15390 ; AVX512DQ-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
15391 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
15392 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
15393 ; AVX512DQ-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
15394 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
15395 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15396 ; AVX512DQ-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
15397 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
15398 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15399 ; AVX512DQ-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
15400 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
15401 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15402 ; AVX512DQ-NEXT: vmovaps %zmm1, 3008(%rax)
15403 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, 2944(%rax)
15404 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 2880(%rax)
15405 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 2816(%rax)
15406 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 2752(%rax)
15407 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15408 ; AVX512DQ-NEXT: vmovaps %zmm0, 2624(%rax)
15409 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15410 ; AVX512DQ-NEXT: vmovaps %zmm0, 2560(%rax)
15411 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 2496(%rax)
15412 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 2432(%rax)
15413 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15414 ; AVX512DQ-NEXT: vmovaps %zmm0, 2368(%rax)
15415 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15416 ; AVX512DQ-NEXT: vmovaps %zmm0, 2240(%rax)
15417 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15418 ; AVX512DQ-NEXT: vmovaps %zmm0, 2176(%rax)
15419 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 2112(%rax)
15420 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 2048(%rax)
15421 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15422 ; AVX512DQ-NEXT: vmovaps %zmm0, 1984(%rax)
15423 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15424 ; AVX512DQ-NEXT: vmovaps %zmm0, 1856(%rax)
15425 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15426 ; AVX512DQ-NEXT: vmovaps %zmm0, 1792(%rax)
15427 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 1728(%rax)
15428 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, 1664(%rax)
15429 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15430 ; AVX512DQ-NEXT: vmovaps %zmm0, 1600(%rax)
15431 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15432 ; AVX512DQ-NEXT: vmovaps %zmm0, 1472(%rax)
15433 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15434 ; AVX512DQ-NEXT: vmovaps %zmm0, 1408(%rax)
15435 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, 1344(%rax)
15436 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, 1280(%rax)
15437 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15438 ; AVX512DQ-NEXT: vmovaps %zmm0, 1216(%rax)
15439 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15440 ; AVX512DQ-NEXT: vmovaps %zmm0, 1088(%rax)
15441 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15442 ; AVX512DQ-NEXT: vmovaps %zmm0, 1024(%rax)
15443 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 960(%rax)
15444 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 896(%rax)
15445 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15446 ; AVX512DQ-NEXT: vmovaps %zmm0, 832(%rax)
15447 ; AVX512DQ-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
15448 ; AVX512DQ-NEXT: vmovaps %zmm0, 704(%rax)
15449 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15450 ; AVX512DQ-NEXT: vmovaps %zmm0, 640(%rax)
15451 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 576(%rax)
15452 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 512(%rax)
15453 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15454 ; AVX512DQ-NEXT: vmovaps %zmm0, 448(%rax)
15455 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15456 ; AVX512DQ-NEXT: vmovaps %zmm0, 320(%rax)
15457 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15458 ; AVX512DQ-NEXT: vmovaps %zmm0, 256(%rax)
15459 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 192(%rax)
15460 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 128(%rax)
15461 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15462 ; AVX512DQ-NEXT: vmovaps %zmm0, 64(%rax)
15463 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 2688(%rax)
15464 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 2304(%rax)
15465 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 1920(%rax)
15466 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 1536(%rax)
15467 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 1152(%rax)
15468 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 768(%rax)
15469 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, 384(%rax)
15470 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, (%rax)
15471 ; AVX512DQ-NEXT: addq $3720, %rsp # imm = 0xE88
15472 ; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i64_stride6_vf64:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512DQ-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512DQ-FCP-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512DQ-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512DQ-FCP-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512DQ-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: movb $12, %al
; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512DQ-FCP-NEXT: movb $48, %al
; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm1
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512DQ-FCP-NEXT: movb $16, %al
; AVX512DQ-FCP-NEXT: kmovw %eax, %k2
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512DQ-FCP-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512DQ-FCP-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-FCP-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512DQ-FCP-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512DQ-FCP-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
16037 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
16038 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16039 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
16040 ; AVX512DQ-FCP-NEXT: vmovdqa 384(%rdx), %xmm6
16041 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
16042 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
16043 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
16044 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
16045 ; AVX512DQ-FCP-NEXT: vmovdqa 448(%rdx), %xmm6
16046 ; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
16047 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
16048 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16049 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
16050 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
16051 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
16052 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
16053 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
16054 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
16055 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
16056 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
16057 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
16058 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
16059 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
16060 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
16061 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
16062 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
16063 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
16064 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
16065 ; AVX512DQ-FCP-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
16066 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
16067 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
16068 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16069 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
16070 ; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
16071 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16072 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
16073 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
16074 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
16075 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
16076 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
16077 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
16078 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
16079 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
16080 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
16081 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
16082 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
16083 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
16084 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16085 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
16086 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
16087 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16088 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
16089 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
16090 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16091 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
16092 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
16093 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16094 ; AVX512DQ-FCP-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
16095 ; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
16096 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16097 ; AVX512DQ-FCP-NEXT: vmovaps %zmm1, 3008(%rax)
16098 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, 2944(%rax)
16099 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 2880(%rax)
16100 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 2816(%rax)
16101 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, 2752(%rax)
16102 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16103 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 2624(%rax)
16104 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16105 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 2560(%rax)
16106 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 2496(%rax)
16107 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm19, 2432(%rax)
16108 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16109 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 2368(%rax)
16110 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16111 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 2240(%rax)
16112 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16113 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 2176(%rax)
16114 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 2112(%rax)
16115 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, 2048(%rax)
16116 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16117 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1984(%rax)
16118 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16119 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1856(%rax)
16120 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16121 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1792(%rax)
16122 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 1728(%rax)
16123 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, 1664(%rax)
16124 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16125 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1600(%rax)
16126 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16127 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1472(%rax)
16128 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16129 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1408(%rax)
16130 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm29, 1344(%rax)
16131 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm30, 1280(%rax)
16132 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16133 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1216(%rax)
16134 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16135 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1088(%rax)
16136 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16137 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 1024(%rax)
16138 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 960(%rax)
16139 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm31, 896(%rax)
16140 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16141 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 832(%rax)
16142 ; AVX512DQ-FCP-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
16143 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 704(%rax)
16144 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16145 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 640(%rax)
16146 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
16147 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 512(%rax)
16148 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16149 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 448(%rax)
16150 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16151 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 320(%rax)
16152 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16153 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 256(%rax)
16154 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, 192(%rax)
16155 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 128(%rax)
16156 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16157 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, 64(%rax)
16158 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, 2688(%rax)
16159 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 2304(%rax)
16160 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 1920(%rax)
16161 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, 1536(%rax)
16162 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, 1152(%rax)
16163 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, 768(%rax)
16164 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, 384(%rax)
16165 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, (%rax)
16166 ; AVX512DQ-FCP-NEXT: addq $3720, %rsp # imm = 0xE88
16167 ; AVX512DQ-FCP-NEXT: vzeroupper
16168 ; AVX512DQ-FCP-NEXT: retq
; AVX512BW-LABEL: store_i64_stride6_vf64:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512BW-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512BW-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512BW-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512BW-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512BW-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512BW-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512BW-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512BW-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512BW-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512BW-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512BW-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512BW-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512BW-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512BW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512BW-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512BW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512BW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512BW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512BW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512BW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512BW-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: movb $12, %al
; AVX512BW-NEXT: kmovd %eax, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512BW-NEXT: movb $48, %al
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512BW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512BW-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512BW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512BW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512BW-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa (%rdi), %ymm1
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512BW-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512BW-NEXT: movb $16, %al
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512BW-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512BW-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512BW-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512BW-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512BW-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512BW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512BW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512BW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512BW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512BW-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512BW-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512BW-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512BW-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512BW-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
; AVX512BW-NEXT: vmovdqa 384(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
; AVX512BW-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
; AVX512BW-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
; AVX512BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
; AVX512BW-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
; AVX512BW-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
; AVX512BW-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
; AVX512BW-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
; AVX512BW-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
; AVX512BW-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
; AVX512BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm1, 3008(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm28, 2944(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm0, 2880(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm16, 2816(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm27, 2752(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 2624(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 2560(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm2, 2496(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm19, 2432(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 2240(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 2176(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm3, 2112(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm25, 2048(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm4, 1728(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm26, 1664(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1600(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm29, 1344(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm30, 1280(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm6, 960(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm31, 896(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512BW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm12, 192(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm13, 128(%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm14, 2688(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm15, 2304(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm17, 1920(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm18, 1536(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm20, 1152(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm21, 768(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm23, 384(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512BW-NEXT: addq $3720, %rsp # imm = 0xE88
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i64_stride6_vf64:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512BW-FCP-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512BW-FCP-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512BW-FCP-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512BW-FCP-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512BW-FCP-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512BW-FCP-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512BW-FCP-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512BW-FCP-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512BW-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512BW-FCP-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512BW-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: movb $12, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k1
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512BW-FCP-NEXT: movb $48, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k2
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512BW-FCP-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512BW-FCP-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm1
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512BW-FCP-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512BW-FCP-NEXT: movb $16, %al
; AVX512BW-FCP-NEXT: kmovd %eax, %k2
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512BW-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512BW-FCP-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512BW-FCP-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512BW-FCP-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512BW-FCP-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 384(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
; AVX512BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
; AVX512BW-FCP-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm1, 3008(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm28, 2944(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 2880(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 2816(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, 2752(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 2624(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 2560(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 2496(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, 2432(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 2240(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 2176(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 2112(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, 2048(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 1728(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, 1664(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1600(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm29, 1344(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm30, 1280(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 960(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm31, 896(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512BW-FCP-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 192(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rax)
; AVX512BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-FCP-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 2688(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 2304(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 1920(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 1536(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, 1152(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm21, 768(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, 384(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512BW-FCP-NEXT: addq $3720, %rsp # imm = 0xE88
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i64_stride6_vf64:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512DQ-BW-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512DQ-BW-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512DQ-BW-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512DQ-BW-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512DQ-BW-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512DQ-BW-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512DQ-BW-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512DQ-BW-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512DQ-BW-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
17821 ; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
17822 ; AVX512DQ-BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: movb $12, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
; AVX512DQ-BW-NEXT: movb $48, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k2
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512DQ-BW-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512DQ-BW-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm1
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512DQ-BW-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512DQ-BW-NEXT: movb $16, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k2
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512DQ-BW-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512DQ-BW-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512DQ-BW-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-BW-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512DQ-BW-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512DQ-BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512DQ-BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 384(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
; AVX512DQ-BW-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
; AVX512DQ-BW-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
; AVX512DQ-BW-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512DQ-BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm1, 3008(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm28, 2944(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 2880(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 2816(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, 2752(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 2624(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 2560(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 2496(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, 2432(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 2240(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 2176(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 2112(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, 2048(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 1728(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, 1664(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1600(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm29, 1344(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm30, 1280(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 960(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm31, 896(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512DQ-BW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 192(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 128(%rax)
; AVX512DQ-BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 2688(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 2304(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 1920(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 1536(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 1152(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm21, 768(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, 384(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512DQ-BW-NEXT: addq $3720, %rsp # imm = 0xE88
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i64_stride6_vf64:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: subq $3720, %rsp # imm = 0xE88
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm30
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm29
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm28
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm27
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rsi), %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rsi), %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rsi), %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,5,13,4,12,5,13]
; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm2, %zmm22
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm30, %zmm11, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [2,10,2,10,2,10,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm12
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm2, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [7,15,7,15,7,15,7,15]
; AVX512DQ-BW-FCP-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm15, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm12, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,1,9,0,8,1,9]
; AVX512DQ-BW-FCP-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm2, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm15, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm3, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm12, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm2, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm15, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm28, %zmm3, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm12, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm2, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm15, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm27, %zmm3, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm12, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm15, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm3, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm2, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm13, %zmm3, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm2, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm24, %zmm3, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm14
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm29
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm21
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm15, %zmm29
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm20
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm15, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm23
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdx), %zmm17
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdx), %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rcx), %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm21, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdx), %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rcx), %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm21, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm4, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdx), %zmm16
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rcx), %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm21, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm0, %zmm1, %zmm21
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} ymm1 = [0,0,4,12]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm24
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm19
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm31
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm31
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm21
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm21
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm14, %zmm5, %zmm1
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [1,9,2,10,1,9,2,10]
; AVX512DQ-BW-FCP-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm27, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,13,6,14,5,13,6,14]
; AVX512DQ-BW-FCP-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm28, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512DQ-BW-FCP-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm14, %zmm0, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm27, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm28, %zmm5
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm20
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm27, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm11
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm28, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm30
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm0, %zmm23
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm28, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm0, %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm28, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm14
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm27, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm28, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm0, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm27, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm28, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm0, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm27
; AVX512DQ-BW-FCP-NEXT: vpermi2q %zmm4, %zmm16, %zmm28
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm16
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18518 ; AVX512DQ-BW-FCP-NEXT: movb $12, %al
18519 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
18520 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
18521 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
18522 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, %zmm18 {%k1}
18523 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
18524 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18525 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
18526 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
18527 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
18528 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
18529 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, %zmm16 {%k1}
18530 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
18531 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm21 {%k1}
18532 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
18533 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm31 {%k1}
18534 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
18535 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm20 {%k1}
18536 ; AVX512DQ-BW-FCP-NEXT: movb $48, %al
18537 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
18538 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18539 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18540 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
18541 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18542 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18543 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
18544 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18545 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm14 = zmm0[0,1,2,3],zmm29[4,5,6,7]
18546 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18547 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
18548 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
18549 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18550 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18551 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18552 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, (%rsp), %zmm0, %zmm0 # 64-byte Folded Reload
18553 ; AVX512DQ-BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
18554 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
18555 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
18556 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18557 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm6 {%k2}
18558 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18559 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
18560 ; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm0 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm27 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %zmm29
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,9,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r8), %zmm2
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%r8), %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%r8), %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%r8), %zmm6
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%r8), %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%r8), %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%r8), %zmm10
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm27
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm28 {%k2}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,13,0,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm23
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm28
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 # 64-byte Folded Reload
; AVX512DQ-BW-FCP-NEXT: # zmm3 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [14,0,2,3,4,5,15,0]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm14
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 (%rsp), %zmm14 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm1
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa 64(%rdi), %ymm1
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm5
; AVX512DQ-BW-FCP-NEXT: movb $16, %al
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm31 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [10,0,2,3,4,5,11,0]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm2, %zmm1, %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm31
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm0, %zmm1, %zmm31
; AVX512DQ-BW-FCP-NEXT: vmovdqa 192(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm30
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm16 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm30
; AVX512DQ-BW-FCP-NEXT: vmovdqa 256(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm17 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqa 320(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, %zmm19 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqa 384(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm19
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm9, %zmm1, %zmm19
; AVX512DQ-BW-FCP-NEXT: vmovdqa 448(%rdi), %ymm2
; AVX512DQ-BW-FCP-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm16
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm22 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm10, %zmm1, %zmm16
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, %zmm20 {%k2}
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %zmm29
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,9,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%r9), %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%r9), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%r9), %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%r9), %zmm8
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%r9), %zmm7
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm24
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%r9), %zmm22
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm27
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,1,2,13,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm23
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm28
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm6 = [0,14,2,3,4,5,6,15]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm6, %zmm14
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm6, %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm6, %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 64(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm14 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm15 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 192(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm10 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 256(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 320(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 384(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm9 {%k1}
; AVX512DQ-BW-FCP-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm1 {%k1}
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, (%r8), %zmm0, %zmm24
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm24
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm23
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm23
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 128(%r8), %zmm15, %zmm21
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm21
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 192(%r8), %zmm10, %zmm20
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm20
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 256(%r8), %zmm12, %zmm18
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm18
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 320(%r8), %zmm2, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm17
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 384(%r8), %zmm9, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm15
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, 448(%r8), %zmm1, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm0 = [0,10,2,3,4,5,6,11]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm0, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm1 = [0,1,2,3,4,12,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm29, %zmm1, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm1, %zmm9
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm0, %zmm31
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm0, %zmm30
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm5, %zmm1, %zmm29
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm0, %zmm26
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm11, %zmm1, %zmm4
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm0, %zmm25
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm8, %zmm1, %zmm3
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm0, %zmm19
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm7, %zmm1, %zmm2
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512DQ-BW-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm1, 3008(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm28, 2944(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 2880(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 2816(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, 2752(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 2624(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 2560(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 2496(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, 2432(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 2240(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 2176(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 2112(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, 2048(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 1728(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, 1664(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1600(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm29, 1344(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm30, 1280(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 960(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm31, 896(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, 576(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 192(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 128(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 2688(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 2304(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 1920(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 1536(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, 1152(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm21, 768(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, 384(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512DQ-BW-FCP-NEXT: addq $3720, %rsp # imm = 0xE88
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
%in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
%in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
%in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
%in.vec3 = load <64 x i64>, ptr %in.vecptr3, align 64
%in.vec4 = load <64 x i64>, ptr %in.vecptr4, align 64
%in.vec5 = load <64 x i64>, ptr %in.vecptr5, align 64
%1 = shufflevector <64 x i64> %in.vec0, <64 x i64> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%2 = shufflevector <64 x i64> %in.vec2, <64 x i64> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%3 = shufflevector <64 x i64> %in.vec4, <64 x i64> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%4 = shufflevector <128 x i64> %1, <128 x i64> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
%5 = shufflevector <128 x i64> %3, <128 x i64> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%6 = shufflevector <256 x i64> %4, <256 x i64> %5, <384 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383>
%interleaved.vec = shufflevector <384 x i64> %6, <384 x i64> poison, <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383>
store <384 x i64> %interleaved.vec, ptr %out.vec, align 64