; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
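; Conceptually (a sketch for the reader, not part of the generated checks),
; a stride-8 store of eight vectors of VF elements each produces:
;   for (i = 0; i < VF; ++i)
;     for (j = 0; j < 8; ++j)
;       out[i*8 + j] = in[j][i];
; which is exactly what the shufflevector masks below encode.
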
define void @store_i64_stride8_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride8_vf2:
; SSE:       # %bb.0:
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; SSE-NEXT:    movaps (%rdi), %xmm0
; SSE-NEXT:    movaps (%rsi), %xmm1
; SSE-NEXT:    movaps (%rdx), %xmm2
; SSE-NEXT:    movaps (%rcx), %xmm3
; SSE-NEXT:    movaps (%r8), %xmm4
; SSE-NEXT:    movaps (%r9), %xmm5
; SSE-NEXT:    movaps (%r11), %xmm6
; SSE-NEXT:    movaps (%r10), %xmm7
; SSE-NEXT:    movaps %xmm0, %xmm8
; SSE-NEXT:    movlhps {{.*#+}} xmm8 = xmm8[0],xmm1[0]
; SSE-NEXT:    movaps %xmm2, %xmm9
; SSE-NEXT:    movlhps {{.*#+}} xmm9 = xmm9[0],xmm3[0]
; SSE-NEXT:    movaps %xmm6, %xmm10
; SSE-NEXT:    movlhps {{.*#+}} xmm10 = xmm10[0],xmm7[0]
; SSE-NEXT:    movaps %xmm4, %xmm11
; SSE-NEXT:    movlhps {{.*#+}} xmm11 = xmm11[0],xmm5[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm5[1]
; SSE-NEXT:    movaps %xmm4, 96(%rax)
; SSE-NEXT:    movaps %xmm6, 112(%rax)
; SSE-NEXT:    movaps %xmm2, 80(%rax)
; SSE-NEXT:    movaps %xmm0, 64(%rax)
; SSE-NEXT:    movaps %xmm11, 32(%rax)
; SSE-NEXT:    movaps %xmm10, 48(%rax)
; SSE-NEXT:    movaps %xmm9, 16(%rax)
; SSE-NEXT:    movaps %xmm8, (%rax)
; SSE-NEXT:    retq
;
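; At vf2 the SSE lowering needs nothing beyond pairwise interleaves: movlhps
; pairs up the low qwords and unpckhpd the high qwords of each input pair,
; and the 128-byte result is written with eight 16-byte stores.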
; AVX1-ONLY-LABEL: store_i64_stride8_vf2:
; AVX1-ONLY:       # %bb.0:
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX1-ONLY-NEXT:    vmovaps (%rdi), %xmm0
; AVX1-ONLY-NEXT:    vmovaps (%rsi), %xmm1
; AVX1-ONLY-NEXT:    vmovaps (%r8), %xmm2
; AVX1-ONLY-NEXT:    vmovaps (%r9), %xmm3
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%rdx), %ymm0, %ymm0
; AVX1-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%r11), %ymm3, %ymm3
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%r10), %ymm2, %ymm2
; AVX1-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm5 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX1-ONLY-NEXT:    vmovaps %ymm1, 96(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 64(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm5, 32(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm4, (%rax)
; AVX1-ONLY-NEXT:    vzeroupper
; AVX1-ONLY-NEXT:    retq
;
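; With AVX, vinsertf128 glues two 128-bit loads into one ymm register, and
; the in-lane vunpcklpd/vunpckhpd on those pairs happens to yield each
; 32-byte output row directly, so four ymm stores suffice.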
; AVX2-ONLY-LABEL: store_i64_stride8_vf2:
; AVX2-ONLY:       # %bb.0:
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX2-ONLY-NEXT:    vmovaps (%rdi), %xmm0
; AVX2-ONLY-NEXT:    vmovaps (%rdx), %xmm1
; AVX2-ONLY-NEXT:    vmovaps (%r8), %xmm2
; AVX2-ONLY-NEXT:    vmovaps (%r11), %xmm3
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%rsi), %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%r9), %ymm2, %ymm2
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%r10), %ymm3, %ymm3
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT:    vpermpd {{.*#+}} ymm4 = ymm4[0,2,1,3]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm5 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT:    vpermpd {{.*#+}} ymm5 = ymm5[0,2,1,3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT:    vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT:    vpermpd {{.*#+}} ymm1 = ymm1[0,2,1,3]
; AVX2-ONLY-NEXT:    vmovaps %ymm1, 96(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 64(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm5, 32(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm4, (%rax)
; AVX2-ONLY-NEXT:    vzeroupper
; AVX2-ONLY-NEXT:    retq
;
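; AVX2 pairs each source with its neighbour (in0/in1 in one ymm, in2/in3 in
; another), so the in-lane unpacks leave each row in [0,2,1,3] element order;
; the cross-lane vpermpd, which AVX1 lacks, restores source order.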
; AVX512-LABEL: store_i64_stride8_vf2:
; AVX512:       # %bb.0:
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX512-NEXT:    vmovdqa (%rdi), %xmm0
; AVX512-NEXT:    vmovdqa (%rdx), %xmm1
; AVX512-NEXT:    vmovdqa (%r8), %xmm2
; AVX512-NEXT:    vmovdqa (%r11), %xmm3
; AVX512-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-NEXT:    vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-NEXT:    vinserti128 $1, (%r10), %ymm3, %ymm1
; AVX512-NEXT:    vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX512-NEXT:    vinserti64x4 $1, %ymm1, %zmm2, %zmm1
; AVX512-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,2,4,6,8,10,12,14]
; AVX512-NEXT:    vpermi2q %zmm1, %zmm0, %zmm2
; AVX512-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [1,3,5,7,9,11,13,15]
; AVX512-NEXT:    vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT:    vmovdqa64 %zmm3, 64(%rax)
; AVX512-NEXT:    vmovdqa64 %zmm2, (%rax)
; AVX512-NEXT:    vzeroupper
; AVX512-NEXT:    retq
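; With 512-bit registers all eight sources fit in two zmms, and the whole
; interleave collapses to two cross-register vpermi2q permutes: the even
; index vector [0,2,...,14] gathers the low element of every source and the
; odd one [1,3,...,15] the high element.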
  %in.vec0 = load <2 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i64>, ptr %in.vecptr5, align 64
  %in.vec6 = load <2 x i64>, ptr %in.vecptr6, align 64
  %in.vec7 = load <2 x i64>, ptr %in.vecptr7, align 64
  %1 = shufflevector <2 x i64> %in.vec0, <2 x i64> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i64> %in.vec2, <2 x i64> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i64> %in.vec4, <2 x i64> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <2 x i64> %in.vec6, <2 x i64> %in.vec7, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %5 = shufflevector <4 x i64> %1, <4 x i64> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %6 = shufflevector <4 x i64> %3, <4 x i64> %4, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %7 = shufflevector <8 x i64> %5, <8 x i64> %6, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %interleaved.vec = shufflevector <16 x i64> %7, <16 x i64> poison, <16 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 14, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13, i32 15>
  store <16 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride8_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride8_vf4:
; SSE:       # %bb.0:
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT:    movaps (%rdi), %xmm4
; SSE-NEXT:    movaps 16(%rdi), %xmm2
; SSE-NEXT:    movaps (%rsi), %xmm10
; SSE-NEXT:    movaps 16(%rsi), %xmm14
; SSE-NEXT:    movaps (%rdx), %xmm1
; SSE-NEXT:    movaps 16(%rdx), %xmm3
; SSE-NEXT:    movaps (%rcx), %xmm7
; SSE-NEXT:    movaps 16(%rcx), %xmm12
; SSE-NEXT:    movaps (%r8), %xmm5
; SSE-NEXT:    movaps 16(%r8), %xmm0
; SSE-NEXT:    movaps (%r9), %xmm13
; SSE-NEXT:    movaps (%r10), %xmm6
; SSE-NEXT:    movaps 16(%r10), %xmm9
; SSE-NEXT:    movaps (%rax), %xmm15
; SSE-NEXT:    movaps 16(%rax), %xmm11
; SSE-NEXT:    movaps %xmm1, %xmm8
; SSE-NEXT:    movlhps {{.*#+}} xmm8 = xmm8[0],xmm7[0]
; SSE-NEXT:    movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm7[1]
; SSE-NEXT:    movaps %xmm4, %xmm7
; SSE-NEXT:    movlhps {{.*#+}} xmm7 = xmm7[0],xmm10[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm10[1]
; SSE-NEXT:    movaps %xmm3, %xmm10
; SSE-NEXT:    movlhps {{.*#+}} xmm10 = xmm10[0],xmm12[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm12[1]
; SSE-NEXT:    movaps %xmm2, %xmm12
; SSE-NEXT:    movlhps {{.*#+}} xmm12 = xmm12[0],xmm14[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm14[1]
; SSE-NEXT:    movaps %xmm6, %xmm14
; SSE-NEXT:    movlhps {{.*#+}} xmm14 = xmm14[0],xmm15[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm15[1]
; SSE-NEXT:    movaps %xmm5, %xmm15
; SSE-NEXT:    movlhps {{.*#+}} xmm15 = xmm15[0],xmm13[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm13[1]
; SSE-NEXT:    movaps %xmm9, %xmm13
; SSE-NEXT:    movlhps {{.*#+}} xmm13 = xmm13[0],xmm11[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm11[1]
; SSE-NEXT:    movaps 16(%r9), %xmm11
; SSE-NEXT:    movaps %xmm0, %xmm8
; SSE-NEXT:    movlhps {{.*#+}} xmm8 = xmm8[0],xmm11[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm11[1]
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT:    movaps %xmm0, 224(%rax)
; SSE-NEXT:    movaps %xmm9, 240(%rax)
; SSE-NEXT:    movaps %xmm8, 160(%rax)
; SSE-NEXT:    movaps %xmm13, 176(%rax)
; SSE-NEXT:    movaps %xmm5, 96(%rax)
; SSE-NEXT:    movaps %xmm6, 112(%rax)
; SSE-NEXT:    movaps %xmm15, 32(%rax)
; SSE-NEXT:    movaps %xmm14, 48(%rax)
; SSE-NEXT:    movaps %xmm2, 192(%rax)
; SSE-NEXT:    movaps %xmm3, 208(%rax)
; SSE-NEXT:    movaps %xmm12, 128(%rax)
; SSE-NEXT:    movaps %xmm10, 144(%rax)
; SSE-NEXT:    movaps %xmm4, 64(%rax)
; SSE-NEXT:    movaps %xmm1, 80(%rax)
; SSE-NEXT:    movaps %xmm7, (%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 16(%rax)
; SSE-NEXT:    retq
;
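; vf4 repeats the movlhps/unpckhpd pattern over twice the data; with sixteen
; pair results live at once the xmm register file runs out, hence the single
; 16-byte spill/reload around the stores.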
; AVX1-ONLY-LABEL: store_i64_stride8_vf4:
; AVX1-ONLY:       # %bb.0:
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX1-ONLY-NEXT:    vmovaps (%rdx), %ymm2
; AVX1-ONLY-NEXT:    vmovaps (%rcx), %ymm3
; AVX1-ONLY-NEXT:    vmovaps (%r11), %ymm1
; AVX1-ONLY-NEXT:    vmovaps (%r10), %ymm4
; AVX1-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm4[0],ymm1[2],ymm4[2]
; AVX1-ONLY-NEXT:    vmovaps 16(%r9), %xmm5
; AVX1-ONLY-NEXT:    vmovaps 16(%r8), %xmm6
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm7 = xmm6[0],xmm5[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm4[1],ymm1[3],ymm4[3]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm4 = xmm6[1],xmm5[1]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm4[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm4 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX1-ONLY-NEXT:    vmovaps 16(%rsi), %xmm5
; AVX1-ONLY-NEXT:    vmovaps 16(%rdi), %xmm6
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm7 = xmm6[1],xmm5[1]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm4 = ymm7[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm3 = xmm6[0],xmm5[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT:    vmovaps (%r9), %xmm3
; AVX1-ONLY-NEXT:    vmovaps (%r8), %xmm5
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm6 = xmm5[1],xmm3[1]
; AVX1-ONLY-NEXT:    vmovaps (%r10), %xmm7
; AVX1-ONLY-NEXT:    vmovaps (%r11), %xmm8
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm7[1]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm3 = xmm5[0],xmm3[0]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm5 = xmm8[0],xmm7[0]
; AVX1-ONLY-NEXT:    vmovaps (%rsi), %xmm7
; AVX1-ONLY-NEXT:    vmovaps (%rdi), %xmm8
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm10 = xmm8[1],xmm7[1]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm7 = xmm8[0],xmm7[0]
; AVX1-ONLY-NEXT:    vmovaps (%rcx), %xmm8
; AVX1-ONLY-NEXT:    vmovaps (%rdx), %xmm11
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm12 = xmm11[1],xmm8[1]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm8 = xmm11[0],xmm8[0]
; AVX1-ONLY-NEXT:    vmovaps %xmm8, 16(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm12, 80(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm7, (%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm10, 64(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm5, 48(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm3, 32(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm9, 112(%rax)
; AVX1-ONLY-NEXT:    vmovaps %xmm6, 96(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm2, 128(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm4, 192(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm1, 224(%rax)
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 160(%rax)
; AVX1-ONLY-NEXT:    vzeroupper
; AVX1-ONLY-NEXT:    retq
;
; AVX2-ONLY-LABEL: store_i64_stride8_vf4:
; AVX2-ONLY:       # %bb.0:
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX2-ONLY-NEXT:    vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT:    vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT:    vmovaps (%rdx), %ymm2
; AVX2-ONLY-NEXT:    vmovaps (%rcx), %ymm3
; AVX2-ONLY-NEXT:    vmovaps (%r8), %ymm4
; AVX2-ONLY-NEXT:    vmovaps (%r9), %ymm5
; AVX2-ONLY-NEXT:    vmovaps (%r11), %ymm6
; AVX2-ONLY-NEXT:    vmovaps (%r10), %ymm7
; AVX2-ONLY-NEXT:    vmovaps (%r9), %xmm8
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%r10), %ymm8, %ymm8
; AVX2-ONLY-NEXT:    vmovaps (%r8), %xmm9
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%r11), %ymm9, %ymm9
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm8 = ymm9[1],ymm8[1],ymm9[3],ymm8[3]
; AVX2-ONLY-NEXT:    vmovaps (%rsi), %xmm9
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%rcx), %ymm9, %ymm9
; AVX2-ONLY-NEXT:    vmovaps (%rdi), %xmm11
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%rdx), %ymm11, %ymm11
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm12 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm9 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[2],ymm7[2]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm13 = ymm4[0],ymm5[0],ymm4[2],ymm5[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm11 = ymm13[2,3],ymm11[2,3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm4 = ymm4[1],ymm5[1],ymm4[3],ymm5[3]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm6[2,3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm5 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm6 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm5 = ymm6[2,3],ymm5[2,3]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 128(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm5, 192(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm4, 224(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm11, 160(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm9, 64(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm12, (%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm8, 96(%rax)
; AVX2-ONLY-NEXT:    vmovaps %ymm10, 32(%rax)
; AVX2-ONLY-NEXT:    vzeroupper
; AVX2-ONLY-NEXT:    retq
;
; AVX512-LABEL: store_i64_stride8_vf4:
; AVX512:       # %bb.0:
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX512-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX512-NEXT:    vmovdqa (%rdi), %ymm0
; AVX512-NEXT:    vmovdqa (%rdx), %ymm1
; AVX512-NEXT:    vmovdqa (%r8), %ymm2
; AVX512-NEXT:    vmovdqa (%r11), %ymm3
; AVX512-NEXT:    vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512-NEXT:    vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512-NEXT:    vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512-NEXT:    vinserti64x4 $1, (%r10), %zmm3, %zmm3
; AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,4,8,12,0,4,8,12]
; AVX512-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT:    vmovdqa64 %zmm2, %zmm5
; AVX512-NEXT:    vpermt2q %zmm3, %zmm4, %zmm5
; AVX512-NEXT:    vpermi2q %zmm1, %zmm0, %zmm4
; AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm4 = zmm4[0,1,2,3],zmm5[4,5,6,7]
; AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,5,9,13,1,5,9,13]
; AVX512-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT:    vmovdqa64 %zmm2, %zmm6
; AVX512-NEXT:    vpermt2q %zmm3, %zmm5, %zmm6
; AVX512-NEXT:    vpermi2q %zmm1, %zmm0, %zmm5
; AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm6[4,5,6,7]
; AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm6 = [2,6,10,14,2,6,10,14]
; AVX512-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT:    vmovdqa64 %zmm2, %zmm7
; AVX512-NEXT:    vpermt2q %zmm3, %zmm6, %zmm7
; AVX512-NEXT:    vpermi2q %zmm1, %zmm0, %zmm6
; AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm6[0,1,2,3],zmm7[4,5,6,7]
; AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm7 = [3,7,11,15,3,7,11,15]
; AVX512-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT:    vpermt2q %zmm3, %zmm7, %zmm2
; AVX512-NEXT:    vpermt2q %zmm1, %zmm7, %zmm0
; AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm2[4,5,6,7]
; AVX512-NEXT:    vmovdqa64 %zmm0, 192(%rax)
; AVX512-NEXT:    vmovdqa64 %zmm6, 128(%rax)
; AVX512-NEXT:    vmovdqa64 %zmm5, 64(%rax)
; AVX512-NEXT:    vmovdqa64 %zmm4, (%rax)
; AVX512-NEXT:    vzeroupper
; AVX512-NEXT:    retq
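; Here each 64-byte output row gathers element j of all eight sources: two
; permutes with a broadcast [j,j+4,j+8,j+12] index pattern build the two
; halves of the row, and vshufi64x2 joins them.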
  %in.vec0 = load <4 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i64>, ptr %in.vecptr5, align 64
  %in.vec6 = load <4 x i64>, ptr %in.vecptr6, align 64
  %in.vec7 = load <4 x i64>, ptr %in.vecptr7, align 64
  %1 = shufflevector <4 x i64> %in.vec0, <4 x i64> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i64> %in.vec2, <4 x i64> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i64> %in.vec4, <4 x i64> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <4 x i64> %in.vec6, <4 x i64> %in.vec7, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <8 x i64> %1, <8 x i64> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %6 = shufflevector <8 x i64> %3, <8 x i64> %4, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %7 = shufflevector <16 x i64> %5, <16 x i64> %6, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %interleaved.vec = shufflevector <32 x i64> %7, <32 x i64> poison, <32 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 28, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 29, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 30, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27, i32 31>
  store <32 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride8_vf8:
; SSE:       # %bb.0:
; SSE-NEXT:    subq $152, %rsp
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT:    movaps (%rdi), %xmm7
; SSE-NEXT:    movaps 16(%rdi), %xmm9
; SSE-NEXT:    movaps (%rsi), %xmm3
; SSE-NEXT:    movaps 16(%rsi), %xmm0
; SSE-NEXT:    movaps (%rdx), %xmm8
; SSE-NEXT:    movaps 16(%rdx), %xmm11
; SSE-NEXT:    movaps (%rcx), %xmm4
; SSE-NEXT:    movaps 16(%rcx), %xmm1
; SSE-NEXT:    movaps (%r8), %xmm10
; SSE-NEXT:    movaps 16(%r8), %xmm13
; SSE-NEXT:    movaps (%r9), %xmm5
; SSE-NEXT:    movaps 16(%r9), %xmm2
; SSE-NEXT:    movaps (%r10), %xmm12
; SSE-NEXT:    movaps 16(%r10), %xmm15
; SSE-NEXT:    movaps (%rax), %xmm6
; SSE-NEXT:    movaps %xmm7, %xmm14
; SSE-NEXT:    movlhps {{.*#+}} xmm14 = xmm14[0],xmm3[0]
; SSE-NEXT:    movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
; SSE-NEXT:    movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm8, %xmm3
; SSE-NEXT:    movlhps {{.*#+}} xmm3 = xmm3[0],xmm4[0]
; SSE-NEXT:    movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm4[1]
; SSE-NEXT:    movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm10, %xmm4
; SSE-NEXT:    movlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; SSE-NEXT:    movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm5[1]
; SSE-NEXT:    movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm12, %xmm4
; SSE-NEXT:    movlhps {{.*#+}} xmm4 = xmm4[0],xmm6[0]
; SSE-NEXT:    movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm6[1]
; SSE-NEXT:    movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm9, %xmm3
; SSE-NEXT:    movlhps {{.*#+}} xmm3 = xmm3[0],xmm0[0]
; SSE-NEXT:    movaps %xmm3, (%rsp) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT:    movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm11, %xmm0
; SSE-NEXT:    movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; SSE-NEXT:    movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm1[1]
; SSE-NEXT:    movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps %xmm13, %xmm0
; SSE-NEXT:    movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
; SSE-NEXT:    movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm2[1]
; SSE-NEXT:    movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps 16(%rax), %xmm0
; SSE-NEXT:    movaps %xmm15, %xmm1
; SSE-NEXT:    movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT:    movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
; SSE-NEXT:    movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    movaps 32(%rdi), %xmm13
; SSE-NEXT:    movaps 32(%rsi), %xmm0
; SSE-NEXT:    movaps %xmm13, %xmm1
; SSE-NEXT:    movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT:    movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT:    unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
; SSE-NEXT:    movaps 32(%rdx), %xmm11
; SSE-NEXT:    movaps 32(%rcx), %xmm0
; SSE-NEXT:    movaps %xmm11, %xmm15
; SSE-NEXT:    movlhps {{.*#+}} xmm15 = xmm15[0],xmm0[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
; SSE-NEXT:    movaps 32(%r8), %xmm10
; SSE-NEXT:    movaps 32(%r9), %xmm0
; SSE-NEXT:    movaps %xmm10, %xmm14
; SSE-NEXT:    movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
; SSE-NEXT:    movaps 32(%r10), %xmm8
; SSE-NEXT:    movaps 32(%rax), %xmm1
; SSE-NEXT:    movaps %xmm8, %xmm12
; SSE-NEXT:    movlhps {{.*#+}} xmm12 = xmm12[0],xmm1[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
; SSE-NEXT:    movaps 48(%rdi), %xmm6
; SSE-NEXT:    movaps 48(%rsi), %xmm0
; SSE-NEXT:    movaps %xmm6, %xmm9
; SSE-NEXT:    movlhps {{.*#+}} xmm9 = xmm9[0],xmm0[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm0[1]
; SSE-NEXT:    movaps 48(%rdx), %xmm5
; SSE-NEXT:    movaps 48(%rcx), %xmm1
; SSE-NEXT:    movaps %xmm5, %xmm7
; SSE-NEXT:    movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm1[1]
; SSE-NEXT:    movaps 48(%r8), %xmm1
; SSE-NEXT:    movaps 48(%r9), %xmm2
; SSE-NEXT:    movaps %xmm1, %xmm4
; SSE-NEXT:    movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; SSE-NEXT:    movaps 48(%r10), %xmm2
; SSE-NEXT:    movaps 48(%rax), %xmm3
; SSE-NEXT:    movaps %xmm2, %xmm0
; SSE-NEXT:    movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
; SSE-NEXT:    unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; SSE-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT:    movaps %xmm2, 496(%rax)
; SSE-NEXT:    movaps %xmm1, 480(%rax)
; SSE-NEXT:    movaps %xmm5, 464(%rax)
; SSE-NEXT:    movaps %xmm6, 448(%rax)
; SSE-NEXT:    movaps %xmm0, 432(%rax)
; SSE-NEXT:    movaps %xmm4, 416(%rax)
; SSE-NEXT:    movaps %xmm7, 400(%rax)
; SSE-NEXT:    movaps %xmm9, 384(%rax)
; SSE-NEXT:    movaps %xmm8, 368(%rax)
; SSE-NEXT:    movaps %xmm10, 352(%rax)
; SSE-NEXT:    movaps %xmm11, 336(%rax)
; SSE-NEXT:    movaps %xmm13, 320(%rax)
; SSE-NEXT:    movaps %xmm12, 304(%rax)
; SSE-NEXT:    movaps %xmm14, 288(%rax)
; SSE-NEXT:    movaps %xmm15, 272(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 256(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 240(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 224(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 208(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 192(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 176(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 160(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 144(%rax)
; SSE-NEXT:    movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 128(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 112(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 96(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 80(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 64(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 48(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 32(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, 16(%rax)
; SSE-NEXT:    movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT:    movaps %xmm0, (%rax)
; SSE-NEXT:    addq $152, %rsp
; SSE-NEXT:    retq
;
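; At vf8 the SSE version exhausts the register file outright: the 152-byte
; frame holds the first half's interleaved pairs, which are reloaded for the
; final run of stores once the output pointer is fetched from the stack.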
; AVX1-ONLY-LABEL: store_i64_stride8_vf8:
; AVX1-ONLY:       # %bb.0:
; AVX1-ONLY-NEXT:    pushq %rax
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT:    vmovaps 32(%r9), %xmm1
; AVX1-ONLY-NEXT:    vmovaps 32(%r8), %xmm2
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT:    vmovaps (%rax), %xmm5
; AVX1-ONLY-NEXT:    vmovaps 32(%rax), %xmm3
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm3, %ymm0, %ymm4
; AVX1-ONLY-NEXT:    vinsertf128 $1, 32(%r10), %ymm0, %ymm0
; AVX1-ONLY-NEXT:    vshufpd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2],ymm4[2]
; AVX1-ONLY-NEXT:    vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm3, %ymm1, %ymm2
; AVX1-ONLY-NEXT:    vbroadcastsd 40(%r10), %ymm3
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT:    vmovaps 32(%rsi), %xmm3
; AVX1-ONLY-NEXT:    vmovaps 32(%rdi), %xmm4
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm2 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT:    vmovaps (%rcx), %xmm7
; AVX1-ONLY-NEXT:    vmovaps 32(%rcx), %xmm6
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm6, %ymm2, %ymm8
; AVX1-ONLY-NEXT:    vinsertf128 $1, 32(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT:    vshufpd {{.*#+}} ymm0 = ymm2[0],ymm8[1],ymm2[2],ymm8[2]
; AVX1-ONLY-NEXT:    vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm3 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm6, %ymm3, %ymm4
; AVX1-ONLY-NEXT:    vbroadcastsd 40(%rdx), %ymm6
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
; AVX1-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT:    vmovaps (%r9), %xmm6
; AVX1-ONLY-NEXT:    vmovaps (%r8), %xmm8
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm4 = xmm8[0],xmm6[0]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm5, %ymm4, %ymm9
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%r10), %ymm4, %ymm4
; AVX1-ONLY-NEXT:    vshufpd {{.*#+}} ymm4 = ymm4[0],ymm9[1],ymm4[2],ymm9[2]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm6 = xmm8[1],xmm6[1]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm5, %ymm6, %ymm5
; AVX1-ONLY-NEXT:    vbroadcastsd 8(%r10), %ymm8
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
; AVX1-ONLY-NEXT:    vmovaps (%rsi), %xmm8
; AVX1-ONLY-NEXT:    vmovaps (%rdi), %xmm9
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm6 = xmm9[0],xmm8[0]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm7, %ymm6, %ymm10
; AVX1-ONLY-NEXT:    vinsertf128 $1, (%rdx), %ymm6, %ymm6
; AVX1-ONLY-NEXT:    vshufpd {{.*#+}} ymm6 = ymm6[0],ymm10[1],ymm6[2],ymm10[2]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm8 = xmm9[1],xmm8[1]
; AVX1-ONLY-NEXT:    vinsertf128 $1, %xmm7, %ymm8, %ymm7
; AVX1-ONLY-NEXT:    vbroadcastsd 8(%rdx), %ymm9
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm8 = ymm8[0,1],ymm7[2,3],ymm8[4,5],ymm7[6,7]
; AVX1-ONLY-NEXT:    vmovaps 16(%r9), %xmm10
; AVX1-ONLY-NEXT:    vmovaps 16(%r8), %xmm11
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm7 = xmm11[0],xmm10[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT:    vbroadcastsd 16(%rax), %ymm9
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],ymm9[6,7]
; AVX1-ONLY-NEXT:    vmovaps 16(%rsi), %xmm12
; AVX1-ONLY-NEXT:    vmovaps 16(%rdi), %xmm13
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm9 = xmm13[0],xmm12[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT:    vbroadcastsd 16(%rcx), %ymm14
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm14[6,7]
; AVX1-ONLY-NEXT:    vmovaps 48(%r9), %xmm14
; AVX1-ONLY-NEXT:    vmovaps 48(%r8), %xmm15
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm14[1]
; AVX1-ONLY-NEXT:    vbroadcastsd 56(%r10), %ymm1
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm1 = xmm15[0],xmm14[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT:    vbroadcastsd 48(%rax), %ymm14
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3,4,5],ymm14[6,7]
; AVX1-ONLY-NEXT:    vmovaps 48(%rsi), %xmm1
; AVX1-ONLY-NEXT:    vmovaps 48(%rdi), %xmm15
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm1[1]
; AVX1-ONLY-NEXT:    vbroadcastsd 56(%rdx), %ymm3
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT:    vmovlhps {{.*#+}} xmm1 = xmm15[0],xmm1[0]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT:    vbroadcastsd 48(%rcx), %ymm3
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm3 = xmm11[1],xmm10[1]
; AVX1-ONLY-NEXT:    vbroadcastsd 24(%r10), %ymm10
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm10[4,5,6,7]
; AVX1-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm10 = xmm13[1],xmm12[1]
; AVX1-ONLY-NEXT:    vbroadcastsd 24(%rdx), %ymm11
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4,5,6,7]
; AVX1-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rdx
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT:    vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT:    vmovaps %ymm10, 192(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm3, 224(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm8, 64(%rdx)
; AVX1-ONLY-NEXT:    vmovapd %ymm6, (%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm1, 384(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 448(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm5, 96(%rdx)
; AVX1-ONLY-NEXT:    vmovapd %ymm4, 32(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm14, 416(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm2, 480(%rdx)
; AVX1-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 320(%rdx)
; AVX1-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 256(%rdx)
; AVX1-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 352(%rdx)
; AVX1-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT:    vmovaps %ymm0, 288(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm9, 128(%rdx)
; AVX1-ONLY-NEXT:    vmovaps %ymm7, 160(%rdx)
; AVX1-ONLY-NEXT:    popq %rax
; AVX1-ONLY-NEXT:    vzeroupper
; AVX1-ONLY-NEXT:    retq
;
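; Lacking a general cross-lane 64-bit shuffle, the AVX1 version combines
; xmm-level unpacks with vbroadcastsd-from-memory plus vblendps to place
; qwords into the high 128-bit lane, spilling a few finished ymm rows to
; relieve register pressure.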
; AVX2-ONLY-LABEL: store_i64_stride8_vf8:
; AVX2-ONLY:       # %bb.0:
; AVX2-ONLY-NEXT:    pushq %rax
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT:    vmovaps (%rax), %xmm6
; AVX2-ONLY-NEXT:    vmovaps 32(%rax), %xmm3
; AVX2-ONLY-NEXT:    vinsertf128 $1, %xmm3, %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vmovaps (%r9), %xmm9
; AVX2-ONLY-NEXT:    vmovaps 32(%r9), %xmm7
; AVX2-ONLY-NEXT:    vmovaps (%r8), %xmm10
; AVX2-ONLY-NEXT:    vmovaps 32(%r8), %xmm8
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm7[1]
; AVX2-ONLY-NEXT:    vbroadcastsd 40(%r10), %ymm2
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT:    vmovaps 32(%rcx), %xmm5
; AVX2-ONLY-NEXT:    vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX2-ONLY-NEXT:    vmovaps (%rsi), %xmm11
; AVX2-ONLY-NEXT:    vmovaps 32(%rsi), %xmm13
; AVX2-ONLY-NEXT:    vmovaps (%rdi), %xmm12
; AVX2-ONLY-NEXT:    vmovaps 32(%rdi), %xmm14
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm2 = xmm14[1],xmm13[1]
; AVX2-ONLY-NEXT:    vbroadcastsd 40(%rdx), %ymm4
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT:    vinsertf128 $1, %xmm6, %ymm0, %ymm2
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm4 = xmm10[1],xmm9[1]
; AVX2-ONLY-NEXT:    vbroadcastsd 8(%r10), %ymm15
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm15[4,5,6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm2[6,7]
; AVX2-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} xmm4 = xmm12[1],xmm11[1]
; AVX2-ONLY-NEXT:    vbroadcastsd 8(%rdx), %ymm15
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm15[4,5,6,7]
; AVX2-ONLY-NEXT:    vmovaps (%rcx), %xmm15
; AVX2-ONLY-NEXT:    vinsertf128 $1, %xmm15, %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm8[0],xmm7[0]
; AVX2-ONLY-NEXT:    vmovaps (%rdi), %ymm7
; AVX2-ONLY-NEXT:    vinsertf128 $1, 32(%r10), %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vbroadcastsd %xmm3, %ymm3
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT:    vmovaps (%rsi), %ymm8
; AVX2-ONLY-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm14[0],xmm13[0]
; AVX2-ONLY-NEXT:    vmovaps 32(%r8), %ymm1
; AVX2-ONLY-NEXT:    vinsertf128 $1, 32(%rdx), %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vbroadcastsd %xmm5, %ymm5
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT:    vmovaps (%r8), %ymm13
; AVX2-ONLY-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm10[0],xmm9[0]
; AVX2-ONLY-NEXT:    vmovaps (%r9), %ymm14
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%r10), %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vbroadcastsd %xmm6, %ymm6
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3,4,5],ymm6[6,7]
; AVX2-ONLY-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm12[0],xmm11[0]
; AVX2-ONLY-NEXT:    vinsertf128 $1, (%rdx), %ymm0, %ymm0
; AVX2-ONLY-NEXT:    vbroadcastsd %xmm15, %ymm9
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3,4,5],ymm9[6,7]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm0 = ymm13[0],ymm14[0],ymm13[2],ymm14[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],mem[2,3]
; AVX2-ONLY-NEXT:    vbroadcastsd 16(%rax), %ymm10
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3,4,5],ymm10[6,7]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm0 = ymm7[0],ymm8[0],ymm7[2],ymm8[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],mem[2,3]
; AVX2-ONLY-NEXT:    vbroadcastsd 16(%rcx), %ymm11
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm11 = ymm0[0,1,2,3,4,5],ymm11[6,7]
; AVX2-ONLY-NEXT:    vmovaps 32(%r9), %ymm0
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm12 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX2-ONLY-NEXT:    vbroadcastsd 56(%r10), %ymm15
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm12 = ymm12[2,3],ymm15[2,3]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],mem[2,3]
; AVX2-ONLY-NEXT:    vbroadcastsd 48(%rax), %ymm1
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT:    vmovaps 32(%rdi), %ymm1
; AVX2-ONLY-NEXT:    vmovaps 32(%rsi), %ymm15
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm15[1],ymm1[3],ymm15[3]
; AVX2-ONLY-NEXT:    vbroadcastsd 56(%rdx), %ymm3
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT:    vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm15[0],ymm1[2],ymm15[2]
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],mem[2,3]
; AVX2-ONLY-NEXT:    vbroadcastsd 48(%rcx), %ymm3
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm3 = ymm13[1],ymm14[1],ymm13[3],ymm14[3]
; AVX2-ONLY-NEXT:    vbroadcastsd 24(%r10), %ymm13
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm13[2,3]
; AVX2-ONLY-NEXT:    vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm8[1],ymm7[3],ymm8[3]
; AVX2-ONLY-NEXT:    vbroadcastsd 24(%rdx), %ymm8
; AVX2-ONLY-NEXT:    vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],ymm8[2,3]
; AVX2-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rdx
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm8 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT:    vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT:    vmovaps %ymm7, 192(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm3, 224(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm1, 384(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 448(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm2, 416(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm8, 480(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm11, 128(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm10, 160(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm4, 64(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm9, (%rdx)
; AVX2-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 96(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm6, 32(%rdx)
; AVX2-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 320(%rdx)
; AVX2-ONLY-NEXT:    vmovaps %ymm5, 256(%rdx)
; AVX2-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 352(%rdx)
; AVX2-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT:    vmovaps %ymm0, 288(%rdx)
; AVX2-ONLY-NEXT:    popq %rax
; AVX2-ONLY-NEXT:    vzeroupper
; AVX2-ONLY-NEXT:    retq
;
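; AVX2 swaps those blend tricks for vperm2f128 lane merges and register
; vbroadcastsd splats to supply the top qwords of each row.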
; AVX512F-LABEL: store_i64_stride8_vf8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm6
; AVX512F-NEXT:    vmovdqa64 (%rsi), %zmm9
; AVX512F-NEXT:    vmovdqa64 (%rdx), %zmm7
; AVX512F-NEXT:    vmovdqa64 (%rcx), %zmm8
; AVX512F-NEXT:    vmovdqa64 (%r8), %zmm0
; AVX512F-NEXT:    vmovdqa64 (%r9), %zmm2
; AVX512F-NEXT:    vmovdqa64 (%r11), %zmm1
; AVX512F-NEXT:    vmovdqa64 (%r10), %zmm3
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,1,9,1,9,1,9]
; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm5
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm4, %zmm5
; AVX512F-NEXT:    vpermi2q %zmm2, %zmm0, %zmm4
; AVX512F-NEXT:    movb $-64, %r8b
; AVX512F-NEXT:    kmovw %r8d, %k1
; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k1}
; AVX512F-NEXT:    vmovdqa (%rsi), %xmm5
; AVX512F-NEXT:    vinserti128 $1, (%rcx), %ymm5, %ymm10
; AVX512F-NEXT:    vmovdqa (%rdi), %xmm5
; AVX512F-NEXT:    vinserti128 $1, (%rdx), %ymm5, %ymm12
; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm5 = ymm12[1],ymm10[1],ymm12[3],ymm10[3]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm5, %zmm4, %zmm17
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm5 = [6,14,6,14,6,14,6,14]
; AVX512F-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm11
; AVX512F-NEXT:    vpermt2q %zmm2, %zmm5, %zmm11
; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm11 {%k1} = zmm1[0],zmm3[0],zmm1[2],zmm3[2],zmm1[4],zmm3[4],zmm1[6],zmm3[6]
; AVX512F-NEXT:    vpermi2q %zmm9, %zmm6, %zmm5
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
; AVX512F-NEXT:    # ymm13 = mem[0,1,0,1]
; AVX512F-NEXT:    vpermi2q %zmm8, %zmm7, %zmm13
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm13[4,5,6,7]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm5, %zmm11, %zmm5
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm11 = [7,15,7,15,7,15,7,15]
; AVX512F-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm13
; AVX512F-NEXT:    vpermt2q %zmm2, %zmm11, %zmm13
; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm13 {%k1} = zmm1[1],zmm3[1],zmm1[3],zmm3[3],zmm1[5],zmm3[5],zmm1[7],zmm3[7]
; AVX512F-NEXT:    vpermi2q %zmm9, %zmm6, %zmm11
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [7,15,7,15]
; AVX512F-NEXT:    # ymm14 = mem[0,1,0,1]
; AVX512F-NEXT:    vpermi2q %zmm8, %zmm7, %zmm14
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm14[4,5,6,7]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm11, %zmm13, %zmm11
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [4,12,4,12,4,12,4,12]
; AVX512F-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm14
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm13, %zmm14
; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm15 = zmm0[0],zmm2[0],zmm0[2],zmm2[2],zmm0[4],zmm2[4],zmm0[6],zmm2[6]
; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm15 {%k1}
; AVX512F-NEXT:    vpermi2q %zmm9, %zmm6, %zmm13
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [4,12,4,12]
; AVX512F-NEXT:    # ymm14 = mem[0,1,0,1]
; AVX512F-NEXT:    vpermi2q %zmm8, %zmm7, %zmm14
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm14[4,5,6,7]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [5,13,5,13,5,13,5,13]
; AVX512F-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm15
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm14, %zmm15
; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm16 = zmm0[1],zmm2[1],zmm0[3],zmm2[3],zmm0[5],zmm2[5],zmm0[7],zmm2[7]
; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm16 {%k1}
; AVX512F-NEXT:    vpermt2q %zmm9, %zmm14, %zmm6
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,5,13]
; AVX512F-NEXT:    # ymm9 = mem[0,1,0,1]
; AVX512F-NEXT:    vpermi2q %zmm8, %zmm7, %zmm9
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm9[4,5,6,7]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm6, %zmm16, %zmm6
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [0,8,0,8,0,8,0,8]
; AVX512F-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm8
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm7, %zmm8
; AVX512F-NEXT:    vpermi2q %zmm2, %zmm0, %zmm7
; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm7 {%k1}
; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm7, %zmm7
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm8 = [2,10,2,10,2,10,2,10]
; AVX512F-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm9
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm8, %zmm9
; AVX512F-NEXT:    vpermi2q %zmm2, %zmm0, %zmm8
; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm8 {%k1}
; AVX512F-NEXT:    vmovdqa (%rcx), %ymm9
; AVX512F-NEXT:    vmovdqa (%rdx), %ymm10
; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm9[0],ymm10[2],ymm9[2]
; AVX512F-NEXT:    vmovdqa (%rsi), %ymm14
; AVX512F-NEXT:    vmovdqa (%rdi), %ymm15
; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm15[0],ymm14[0],ymm15[2],ymm14[2]
; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm4 = ymm4[2,3],ymm12[2,3]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm8, %zmm4
; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm8 = [3,11,3,11,3,11,3,11]
; AVX512F-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT:    vpermt2q %zmm3, %zmm8, %zmm1
; AVX512F-NEXT:    vpermt2q %zmm2, %zmm8, %zmm0
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm9[1],ymm10[3],ymm9[3]
; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm15[1],ymm14[1],ymm15[3],ymm14[3]
; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm1 = ymm2[2,3],ymm1[2,3]
; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512F-NEXT:    vmovdqa64 %zmm0, 192(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm4, 128(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm7, (%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm6, 320(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm13, 256(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm11, 448(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm5, 384(%rax)
; AVX512F-NEXT:    vmovdqa64 %zmm17, 64(%rax)
; AVX512F-NEXT:    vzeroupper
; AVX512F-NEXT:    retq
;
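; The AVX512 rows are built from broadcast-index permutes plus a 0b11000000
; write-mask (movb $-64): under %k1, the permute or unpack covering the last
; source pair writes only qword lanes 6-7, merging straight into the row
; assembled from the other sources.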
; AVX512BW-LABEL: store_i64_stride8_vf8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %r11
; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm6
; AVX512BW-NEXT:    vmovdqa64 (%rsi), %zmm9
; AVX512BW-NEXT:    vmovdqa64 (%rdx), %zmm7
; AVX512BW-NEXT:    vmovdqa64 (%rcx), %zmm8
; AVX512BW-NEXT:    vmovdqa64 (%r8), %zmm0
; AVX512BW-NEXT:    vmovdqa64 (%r9), %zmm2
; AVX512BW-NEXT:    vmovdqa64 (%r11), %zmm1
; AVX512BW-NEXT:    vmovdqa64 (%r10), %zmm3
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,1,9,1,9,1,9]
; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm5
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm4, %zmm5
; AVX512BW-NEXT:    vpermi2q %zmm2, %zmm0, %zmm4
; AVX512BW-NEXT:    movb $-64, %r8b
; AVX512BW-NEXT:    kmovd %r8d, %k1
; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k1}
; AVX512BW-NEXT:    vmovdqa (%rsi), %xmm5
; AVX512BW-NEXT:    vinserti128 $1, (%rcx), %ymm5, %ymm10
; AVX512BW-NEXT:    vmovdqa (%rdi), %xmm5
; AVX512BW-NEXT:    vinserti128 $1, (%rdx), %ymm5, %ymm12
; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm5 = ymm12[1],ymm10[1],ymm12[3],ymm10[3]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm4, %zmm17
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm5 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm11
; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm5, %zmm11
; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm11 {%k1} = zmm1[0],zmm3[0],zmm1[2],zmm3[2],zmm1[4],zmm3[4],zmm1[6],zmm3[6]
; AVX512BW-NEXT:    vpermi2q %zmm9, %zmm6, %zmm5
; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
; AVX512BW-NEXT:    # ymm13 = mem[0,1,0,1]
; AVX512BW-NEXT:    vpermi2q %zmm8, %zmm7, %zmm13
; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm13[4,5,6,7]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm11, %zmm5
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm11 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm13
; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm11, %zmm13
; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm13 {%k1} = zmm1[1],zmm3[1],zmm1[3],zmm3[3],zmm1[5],zmm3[5],zmm1[7],zmm3[7]
; AVX512BW-NEXT:    vpermi2q %zmm9, %zmm6, %zmm11
; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [7,15,7,15]
; AVX512BW-NEXT:    # ymm14 = mem[0,1,0,1]
; AVX512BW-NEXT:    vpermi2q %zmm8, %zmm7, %zmm14
; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm14[4,5,6,7]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm13, %zmm11
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [4,12,4,12,4,12,4,12]
; AVX512BW-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm14
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm13, %zmm14
; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm15 = zmm0[0],zmm2[0],zmm0[2],zmm2[2],zmm0[4],zmm2[4],zmm0[6],zmm2[6]
; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm15 {%k1}
; AVX512BW-NEXT:    vpermi2q %zmm9, %zmm6, %zmm13
; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [4,12,4,12]
; AVX512BW-NEXT:    # ymm14 = mem[0,1,0,1]
; AVX512BW-NEXT:    vpermi2q %zmm8, %zmm7, %zmm14
; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm14[4,5,6,7]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [5,13,5,13,5,13,5,13]
; AVX512BW-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm15
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm14, %zmm15
; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm16 = zmm0[1],zmm2[1],zmm0[3],zmm2[3],zmm0[5],zmm2[5],zmm0[7],zmm2[7]
; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm16 {%k1}
; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm14, %zmm6
; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,5,13]
; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
; AVX512BW-NEXT:    vpermi2q %zmm8, %zmm7, %zmm9
; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm9[4,5,6,7]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm16, %zmm6
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [0,8,0,8,0,8,0,8]
; AVX512BW-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm8
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm7, %zmm8
; AVX512BW-NEXT:    vpermi2q %zmm2, %zmm0, %zmm7
; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm7 {%k1}
; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm7, %zmm7
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm8 = [2,10,2,10,2,10,2,10]
; AVX512BW-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm9
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm8, %zmm9
; AVX512BW-NEXT:    vpermi2q %zmm2, %zmm0, %zmm8
; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm8 {%k1}
; AVX512BW-NEXT:    vmovdqa (%rcx), %ymm9
; AVX512BW-NEXT:    vmovdqa (%rdx), %ymm10
; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm9[0],ymm10[2],ymm9[2]
; AVX512BW-NEXT:    vmovdqa (%rsi), %ymm14
; AVX512BW-NEXT:    vmovdqa (%rdi), %ymm15
; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm15[0],ymm14[0],ymm15[2],ymm14[2]
; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm4 = ymm4[2,3],ymm12[2,3]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm8, %zmm4
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm8 = [3,11,3,11,3,11,3,11]
; AVX512BW-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm8, %zmm1
; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm8, %zmm0
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm9[1],ymm10[3],ymm9[3]
; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm15[1],ymm14[1],ymm15[3],ymm14[3]
; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm1 = ymm2[2,3],ymm1[2,3]
; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vmovdqa64 %zmm0, 192(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm4, 128(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm7, (%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm6, 320(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm13, 256(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm11, 448(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm5, 384(%rax)
; AVX512BW-NEXT:    vmovdqa64 %zmm17, 64(%rax)
; AVX512BW-NEXT:    vzeroupper
; AVX512BW-NEXT:    retq
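; The AVX512BW sequence matches AVX512F instruction for instruction; the
; only visible difference is kmovd versus kmovw when loading the write-mask.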
1010 %in.vec0 = load <8 x i64>, ptr %in.vecptr0, align 64
1011 %in.vec1 = load <8 x i64>, ptr %in.vecptr1, align 64
1012 %in.vec2 = load <8 x i64>, ptr %in.vecptr2, align 64
1013 %in.vec3 = load <8 x i64>, ptr %in.vecptr3, align 64
1014 %in.vec4 = load <8 x i64>, ptr %in.vecptr4, align 64
1015 %in.vec5 = load <8 x i64>, ptr %in.vecptr5, align 64
1016 %in.vec6 = load <8 x i64>, ptr %in.vecptr6, align 64
1017 %in.vec7 = load <8 x i64>, ptr %in.vecptr7, align 64
1018 %1 = shufflevector <8 x i64> %in.vec0, <8 x i64> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1019 %2 = shufflevector <8 x i64> %in.vec2, <8 x i64> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1020 %3 = shufflevector <8 x i64> %in.vec4, <8 x i64> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1021 %4 = shufflevector <8 x i64> %in.vec6, <8 x i64> %in.vec7, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1022 %5 = shufflevector <16 x i64> %1, <16 x i64> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1023 %6 = shufflevector <16 x i64> %3, <16 x i64> %4, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1024 %7 = shufflevector <32 x i64> %5, <32 x i64> %6, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
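; The final shufflevector mask below transposes the 8x8 grid of inputs:
; result element 8*i+j is element i of concatenated-input vector j
; (indices 0,8,16,...,56, then 1,9,17,...), i.e. the stride-8 interleave
; out[8*i+j] = in_j[i] that an interleaved store of 8 vectors produces.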
1025 %interleaved.vec = shufflevector <64 x i64> %7, <64 x i64> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63>
1026 store <64 x i64> %interleaved.vec, ptr %out.vec, align 64
1027 ret void
1028 }
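
; The vf16 variant below applies the same stride-8 interleave to eight
; <16 x i64> inputs (a <128 x i64> store, 1024 bytes). Note that the SSE
; lowering must spill its 16-byte movlhps/unpckhpd halves to the stack
; (subq $664, %rsp) before writing the 64 output slots.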
1030 define void @store_i64_stride8_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
1031 ; SSE-LABEL: store_i64_stride8_vf16:
1032 ; SSE: # %bb.0:
1033 ; SSE-NEXT: subq $664, %rsp # imm = 0x298
1034 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1035 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1036 ; SSE-NEXT: movaps (%rdi), %xmm7
1037 ; SSE-NEXT: movaps 16(%rdi), %xmm8
1038 ; SSE-NEXT: movaps (%rsi), %xmm2
1039 ; SSE-NEXT: movaps 16(%rsi), %xmm0
1040 ; SSE-NEXT: movaps (%rdx), %xmm9
1041 ; SSE-NEXT: movaps 16(%rdx), %xmm10
1042 ; SSE-NEXT: movaps (%rcx), %xmm4
1043 ; SSE-NEXT: movaps 16(%rcx), %xmm1
1044 ; SSE-NEXT: movaps (%r8), %xmm11
1045 ; SSE-NEXT: movaps 16(%r8), %xmm12
1046 ; SSE-NEXT: movaps (%r9), %xmm5
1047 ; SSE-NEXT: movaps 16(%r9), %xmm3
1048 ; SSE-NEXT: movaps (%r10), %xmm13
1049 ; SSE-NEXT: movaps 16(%r10), %xmm15
1050 ; SSE-NEXT: movaps (%rax), %xmm6
1051 ; SSE-NEXT: movaps %xmm7, %xmm14
1052 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm2[0]
1053 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1054 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
1055 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1056 ; SSE-NEXT: movaps %xmm9, %xmm2
1057 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
1058 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1059 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm4[1]
1060 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1061 ; SSE-NEXT: movaps %xmm11, %xmm2
1062 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm5[0]
1063 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1064 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm5[1]
1065 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1066 ; SSE-NEXT: movaps %xmm13, %xmm4
1067 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm6[0]
1068 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1069 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
1070 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1071 ; SSE-NEXT: movaps %xmm8, %xmm2
1072 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
1073 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1074 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
1075 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1076 ; SSE-NEXT: movaps %xmm10, %xmm0
1077 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
1078 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1079 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm1[1]
1080 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1081 ; SSE-NEXT: movaps %xmm12, %xmm0
1082 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
1083 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1084 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm3[1]
1085 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1086 ; SSE-NEXT: movaps 16(%rax), %xmm0
1087 ; SSE-NEXT: movaps %xmm15, %xmm1
1088 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1089 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1090 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
1091 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1092 ; SSE-NEXT: movaps 32(%rdi), %xmm2
1093 ; SSE-NEXT: movaps 32(%rsi), %xmm0
1094 ; SSE-NEXT: movaps %xmm2, %xmm1
1095 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1096 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1097 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1098 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1099 ; SSE-NEXT: movaps 32(%rdx), %xmm2
1100 ; SSE-NEXT: movaps 32(%rcx), %xmm0
1101 ; SSE-NEXT: movaps %xmm2, %xmm1
1102 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1103 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1104 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1105 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1106 ; SSE-NEXT: movaps 32(%r8), %xmm2
1107 ; SSE-NEXT: movaps 32(%r9), %xmm0
1108 ; SSE-NEXT: movaps %xmm2, %xmm1
1109 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1110 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1111 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1112 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1113 ; SSE-NEXT: movaps 32(%r10), %xmm2
1114 ; SSE-NEXT: movaps 32(%rax), %xmm0
1115 ; SSE-NEXT: movaps %xmm2, %xmm1
1116 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1117 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1118 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1119 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1120 ; SSE-NEXT: movaps 48(%rdi), %xmm2
1121 ; SSE-NEXT: movaps 48(%rsi), %xmm0
1122 ; SSE-NEXT: movaps %xmm2, %xmm1
1123 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1124 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1125 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1126 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1127 ; SSE-NEXT: movaps 48(%rdx), %xmm2
1128 ; SSE-NEXT: movaps 48(%rcx), %xmm0
1129 ; SSE-NEXT: movaps %xmm2, %xmm1
1130 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1131 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1132 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1133 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1134 ; SSE-NEXT: movaps 48(%r8), %xmm2
1135 ; SSE-NEXT: movaps 48(%r9), %xmm0
1136 ; SSE-NEXT: movaps %xmm2, %xmm1
1137 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1138 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1139 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1140 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1141 ; SSE-NEXT: movaps 48(%r10), %xmm2
1142 ; SSE-NEXT: movaps 48(%rax), %xmm0
1143 ; SSE-NEXT: movaps %xmm2, %xmm1
1144 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1145 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1146 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1147 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1148 ; SSE-NEXT: movaps 64(%rdi), %xmm2
1149 ; SSE-NEXT: movaps 64(%rsi), %xmm0
1150 ; SSE-NEXT: movaps %xmm2, %xmm1
1151 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1152 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1153 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1154 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1155 ; SSE-NEXT: movaps 64(%rdx), %xmm2
1156 ; SSE-NEXT: movaps 64(%rcx), %xmm0
1157 ; SSE-NEXT: movaps %xmm2, %xmm1
1158 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1159 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1160 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1161 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1162 ; SSE-NEXT: movaps 64(%r8), %xmm2
1163 ; SSE-NEXT: movaps 64(%r9), %xmm0
1164 ; SSE-NEXT: movaps %xmm2, %xmm1
1165 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1166 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1167 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1168 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1169 ; SSE-NEXT: movaps 64(%r10), %xmm2
1170 ; SSE-NEXT: movaps 64(%rax), %xmm0
1171 ; SSE-NEXT: movaps %xmm2, %xmm1
1172 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1173 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1174 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1175 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1176 ; SSE-NEXT: movaps 80(%rdi), %xmm2
1177 ; SSE-NEXT: movaps 80(%rsi), %xmm0
1178 ; SSE-NEXT: movaps %xmm2, %xmm1
1179 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1180 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
1181 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1182 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1183 ; SSE-NEXT: movaps 80(%rdx), %xmm2
1184 ; SSE-NEXT: movaps 80(%rcx), %xmm0
1185 ; SSE-NEXT: movaps %xmm2, %xmm1
1186 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1187 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1188 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1189 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1190 ; SSE-NEXT: movaps 80(%r8), %xmm2
1191 ; SSE-NEXT: movaps 80(%r9), %xmm0
1192 ; SSE-NEXT: movaps %xmm2, %xmm1
1193 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1194 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1195 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1196 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1197 ; SSE-NEXT: movaps 80(%r10), %xmm2
1198 ; SSE-NEXT: movaps 80(%rax), %xmm0
1199 ; SSE-NEXT: movaps %xmm2, %xmm1
1200 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1201 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1202 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1203 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1204 ; SSE-NEXT: movaps 96(%rdi), %xmm13
1205 ; SSE-NEXT: movaps 96(%rsi), %xmm0
1206 ; SSE-NEXT: movaps %xmm13, %xmm1
1207 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1208 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1209 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
1210 ; SSE-NEXT: movaps 96(%rdx), %xmm10
1211 ; SSE-NEXT: movaps 96(%rcx), %xmm0
1212 ; SSE-NEXT: movaps %xmm10, %xmm15
1213 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm0[0]
1214 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
1215 ; SSE-NEXT: movaps 96(%r8), %xmm11
1216 ; SSE-NEXT: movaps 96(%r9), %xmm0
1217 ; SSE-NEXT: movaps %xmm11, %xmm14
1218 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
1219 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
1220 ; SSE-NEXT: movaps 96(%r10), %xmm9
1221 ; SSE-NEXT: movaps 96(%rax), %xmm0
1222 ; SSE-NEXT: movaps %xmm9, %xmm12
1223 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm0[0]
1224 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
1225 ; SSE-NEXT: movaps 112(%rdi), %xmm7
1226 ; SSE-NEXT: movaps 112(%rsi), %xmm0
1227 ; SSE-NEXT: movaps %xmm7, %xmm8
1228 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
1229 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
1230 ; SSE-NEXT: movaps 112(%rdx), %xmm5
1231 ; SSE-NEXT: movaps 112(%rcx), %xmm1
1232 ; SSE-NEXT: movaps %xmm5, %xmm6
1233 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm1[0]
1234 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm1[1]
1235 ; SSE-NEXT: movaps 112(%r8), %xmm1
1236 ; SSE-NEXT: movaps 112(%r9), %xmm2
1237 ; SSE-NEXT: movaps %xmm1, %xmm4
1238 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
1239 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
1240 ; SSE-NEXT: movaps 112(%r10), %xmm2
1241 ; SSE-NEXT: movaps 112(%rax), %xmm3
1242 ; SSE-NEXT: movaps %xmm2, %xmm0
1243 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
1244 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
1245 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1246 ; SSE-NEXT: movaps %xmm2, 1008(%rax)
1247 ; SSE-NEXT: movaps %xmm1, 992(%rax)
1248 ; SSE-NEXT: movaps %xmm5, 976(%rax)
1249 ; SSE-NEXT: movaps %xmm7, 960(%rax)
1250 ; SSE-NEXT: movaps %xmm0, 944(%rax)
1251 ; SSE-NEXT: movaps %xmm4, 928(%rax)
1252 ; SSE-NEXT: movaps %xmm6, 912(%rax)
1253 ; SSE-NEXT: movaps %xmm8, 896(%rax)
1254 ; SSE-NEXT: movaps %xmm9, 880(%rax)
1255 ; SSE-NEXT: movaps %xmm11, 864(%rax)
1256 ; SSE-NEXT: movaps %xmm10, 848(%rax)
1257 ; SSE-NEXT: movaps %xmm13, 832(%rax)
1258 ; SSE-NEXT: movaps %xmm12, 816(%rax)
1259 ; SSE-NEXT: movaps %xmm14, 800(%rax)
1260 ; SSE-NEXT: movaps %xmm15, 784(%rax)
1261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1262 ; SSE-NEXT: movaps %xmm0, 768(%rax)
1263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1264 ; SSE-NEXT: movaps %xmm0, 752(%rax)
1265 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1266 ; SSE-NEXT: movaps %xmm0, 736(%rax)
1267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1268 ; SSE-NEXT: movaps %xmm0, 720(%rax)
1269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1270 ; SSE-NEXT: movaps %xmm0, 704(%rax)
1271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1272 ; SSE-NEXT: movaps %xmm0, 688(%rax)
1273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1274 ; SSE-NEXT: movaps %xmm0, 672(%rax)
1275 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1276 ; SSE-NEXT: movaps %xmm0, 656(%rax)
1277 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1278 ; SSE-NEXT: movaps %xmm0, 640(%rax)
1279 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1280 ; SSE-NEXT: movaps %xmm0, 624(%rax)
1281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1282 ; SSE-NEXT: movaps %xmm0, 608(%rax)
1283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1284 ; SSE-NEXT: movaps %xmm0, 592(%rax)
1285 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1286 ; SSE-NEXT: movaps %xmm0, 576(%rax)
1287 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1288 ; SSE-NEXT: movaps %xmm0, 560(%rax)
1289 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1290 ; SSE-NEXT: movaps %xmm0, 544(%rax)
1291 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1292 ; SSE-NEXT: movaps %xmm0, 528(%rax)
1293 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1294 ; SSE-NEXT: movaps %xmm0, 512(%rax)
1295 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1296 ; SSE-NEXT: movaps %xmm0, 496(%rax)
1297 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1298 ; SSE-NEXT: movaps %xmm0, 480(%rax)
1299 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1300 ; SSE-NEXT: movaps %xmm0, 464(%rax)
1301 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1302 ; SSE-NEXT: movaps %xmm0, 448(%rax)
1303 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1304 ; SSE-NEXT: movaps %xmm0, 432(%rax)
1305 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1306 ; SSE-NEXT: movaps %xmm0, 416(%rax)
1307 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1308 ; SSE-NEXT: movaps %xmm0, 400(%rax)
1309 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1310 ; SSE-NEXT: movaps %xmm0, 384(%rax)
1311 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1312 ; SSE-NEXT: movaps %xmm0, 368(%rax)
1313 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1314 ; SSE-NEXT: movaps %xmm0, 352(%rax)
1315 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1316 ; SSE-NEXT: movaps %xmm0, 336(%rax)
1317 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1318 ; SSE-NEXT: movaps %xmm0, 320(%rax)
1319 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1320 ; SSE-NEXT: movaps %xmm0, 304(%rax)
1321 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1322 ; SSE-NEXT: movaps %xmm0, 288(%rax)
1323 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1324 ; SSE-NEXT: movaps %xmm0, 272(%rax)
1325 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1326 ; SSE-NEXT: movaps %xmm0, 256(%rax)
1327 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1328 ; SSE-NEXT: movaps %xmm0, 240(%rax)
1329 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1330 ; SSE-NEXT: movaps %xmm0, 224(%rax)
1331 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1332 ; SSE-NEXT: movaps %xmm0, 208(%rax)
1333 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1334 ; SSE-NEXT: movaps %xmm0, 192(%rax)
1335 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1336 ; SSE-NEXT: movaps %xmm0, 176(%rax)
1337 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1338 ; SSE-NEXT: movaps %xmm0, 160(%rax)
1339 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1340 ; SSE-NEXT: movaps %xmm0, 144(%rax)
1341 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1342 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1343 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1344 ; SSE-NEXT: movaps %xmm0, 112(%rax)
1345 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1346 ; SSE-NEXT: movaps %xmm0, 96(%rax)
1347 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1348 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1349 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1350 ; SSE-NEXT: movaps %xmm0, 64(%rax)
1351 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1352 ; SSE-NEXT: movaps %xmm0, 48(%rax)
1353 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1354 ; SSE-NEXT: movaps %xmm0, 32(%rax)
1355 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1356 ; SSE-NEXT: movaps %xmm0, 16(%rax)
1357 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1358 ; SSE-NEXT: movaps %xmm0, (%rax)
1359 ; SSE-NEXT: addq $664, %rsp # imm = 0x298
1360 ; SSE-NEXT: retq
1361 ;
1362 ; AVX1-ONLY-LABEL: store_i64_stride8_vf16:
1363 ; AVX1-ONLY: # %bb.0:
1364 ; AVX1-ONLY-NEXT: subq $488, %rsp # imm = 0x1E8
1365 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1366 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1367 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm4
1368 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm5
1369 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm4[0]
1370 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm6
1371 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm2
1372 ; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm0
1373 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm3
1374 ; AVX1-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm1, %ymm1
1375 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2],ymm3[2]
1376 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1377 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm7
1378 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm8
1379 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm9 = xmm8[0],xmm7[0]
1380 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm10
1381 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm3
1382 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm1
1383 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm11
1384 ; AVX1-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm9, %ymm9
1385 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm9 = ymm9[0],ymm11[1],ymm9[2],ymm11[2]
1386 ; AVX1-ONLY-NEXT: vmovupd %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1387 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
1388 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm5
1389 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm6
1390 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
1391 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2,3],ymm4[4,5],ymm5[6,7]
1392 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1393 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm8[1],xmm7[1]
1394 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm4, %ymm5
1395 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm6
1396 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
1397 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2,3],ymm4[4,5],ymm5[6,7]
1398 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1399 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
1400 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
1401 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
1402 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
1403 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm6, %ymm6
1404 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
1405 ; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1406 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm6
1407 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm7
1408 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm8 = xmm7[0],xmm6[0]
1409 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm8, %ymm9
1410 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm8, %ymm8
1411 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm8 = ymm8[0],ymm9[1],ymm8[2],ymm9[2]
1412 ; AVX1-ONLY-NEXT: vmovupd %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1413 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
1414 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm2
1415 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm5
1416 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
1417 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
1418 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1419 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
1420 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm3
1421 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm4
1422 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
1423 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
1424 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1425 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm2
1426 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm3
1427 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
1428 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm5
1429 ; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm4, %ymm4
1430 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2],ymm5[2]
1431 ; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1432 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm4
1433 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm5
1434 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
1435 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm6, %ymm7
1436 ; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm6, %ymm6
1437 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
1438 ; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1439 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
1440 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
1441 ; AVX1-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm3
1442 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
1443 ; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm3
1444 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
1445 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1446 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
1447 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
1448 ; AVX1-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm2
1449 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
1450 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1451 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1452 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm0
1453 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
1454 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
1455 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
1456 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm2, %ymm2
1457 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
1458 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1459 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm2
1460 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm4
1461 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm5
1462 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
1463 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
1464 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm6, %ymm6
1465 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
1466 ; AVX1-ONLY-NEXT: vmovupd %ymm6, (%rsp) # 32-byte Spill
1467 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
1468 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
1469 ; AVX1-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm3
1470 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
1471 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1472 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1473 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
1474 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
1475 ; AVX1-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm2
1476 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
1477 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1478 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1479 ; AVX1-ONLY-NEXT: vmovaps 16(%rsi), %xmm0
1480 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm1
1481 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
1482 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
1483 ; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
1484 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
1485 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1486 ; AVX1-ONLY-NEXT: vmovaps 16(%r9), %xmm2
1487 ; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm3
1488 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
1489 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
1490 ; AVX1-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm5
1491 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
1492 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1493 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
1494 ; AVX1-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
1495 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1496 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
1497 ; AVX1-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm1
1498 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1499 ; AVX1-ONLY-NEXT: vmovaps 48(%rsi), %xmm0
1500 ; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm1
1501 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
1502 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
1503 ; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
1504 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
1505 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1506 ; AVX1-ONLY-NEXT: vmovaps 48(%r9), %xmm2
1507 ; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm4
1508 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm2[0]
1509 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],mem[4,5,6,7]
1510 ; AVX1-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm7
1511 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm3[0,1,2,3,4,5],ymm7[6,7]
1512 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
1513 ; AVX1-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm1
1514 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1515 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
1516 ; AVX1-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm1
1517 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1518 ; AVX1-ONLY-NEXT: vmovaps 80(%rsi), %xmm5
1519 ; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm2
1520 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm5[0]
1521 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],mem[4,5,6,7]
1522 ; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm4
1523 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm3[0,1,2,3,4,5],ymm4[6,7]
1524 ; AVX1-ONLY-NEXT: vmovaps 80(%r9), %xmm1
1525 ; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm0
1526 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm0[0],xmm1[0]
1527 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],mem[4,5,6,7]
1528 ; AVX1-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm14
1529 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5],ymm14[6,7]
1530 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
1531 ; AVX1-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm5
1532 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm5[4,5,6,7]
1533 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
1534 ; AVX1-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
1535 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1536 ; AVX1-ONLY-NEXT: vmovaps 112(%rsi), %xmm1
1537 ; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm5
1538 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm14 = xmm5[0],xmm1[0]
1539 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
1540 ; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm15
1541 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
1542 ; AVX1-ONLY-NEXT: vmovaps 112(%r9), %xmm15
1543 ; AVX1-ONLY-NEXT: vmovaps 112(%r8), %xmm0
1544 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm13 = xmm0[0],xmm15[0]
1545 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],mem[4,5,6,7]
1546 ; AVX1-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm12
1547 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
1548 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm5[1],xmm1[1]
1549 ; AVX1-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm5
1550 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
1551 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
1552 ; AVX1-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm5
1553 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
1554 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
1555 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
1556 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm10[0,1,2,3,4,5],mem[6,7]
1557 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
1558 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],mem[6,7]
1559 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
1560 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
1561 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
1562 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
1563 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rdx)
1564 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 960(%rdx)
1565 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 928(%rdx)
1566 ; AVX1-ONLY-NEXT: vmovaps %ymm14, 896(%rdx)
1567 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1568 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
1569 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1570 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
1571 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1572 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
1573 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1574 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
1575 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 736(%rdx)
1576 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 704(%rdx)
1577 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 672(%rdx)
1578 ; AVX1-ONLY-NEXT: vmovaps %ymm9, 640(%rdx)
1579 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1580 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
1581 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1582 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
1583 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1584 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
1585 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1586 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
1587 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 480(%rdx)
1588 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 448(%rdx)
1589 ; AVX1-ONLY-NEXT: vmovaps %ymm11, 416(%rdx)
1590 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1591 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
1592 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1593 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
1594 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1595 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
1596 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1597 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
1598 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1599 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
1600 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 224(%rdx)
1601 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 192(%rdx)
1602 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1603 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
1604 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1605 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
1606 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1607 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
1608 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1609 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
1610 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1611 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
1612 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1613 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rdx)
1614 ; AVX1-ONLY-NEXT: addq $488, %rsp # imm = 0x1E8
1615 ; AVX1-ONLY-NEXT: vzeroupper
1616 ; AVX1-ONLY-NEXT: retq
1617 ;
1618 ; AVX2-ONLY-LABEL: store_i64_stride8_vf16:
1619 ; AVX2-ONLY: # %bb.0:
1620 ; AVX2-ONLY-NEXT: subq $488, %rsp # imm = 0x1E8
1621 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1622 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1623 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm0
1624 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1625 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm3
1626 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1627 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
1628 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm2
1629 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1630 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
1631 ; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1632 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm1
1633 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1634 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
1635 ; AVX2-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1636 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
1637 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm2
1638 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1639 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1640 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1641 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm0
1642 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1643 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
1644 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm1
1645 ; AVX2-ONLY-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
1646 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm6
1647 ; AVX2-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1648 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm15
1649 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm7
1650 ; AVX2-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1651 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm15[1],xmm1[1]
1652 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm2
1653 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1654 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1655 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1656 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
1657 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm5[1],xmm4[1]
1658 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm2
1659 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1660 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1661 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1662 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
1663 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm1
1664 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1665 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm12
1666 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm1
1667 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1668 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1669 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm11
1670 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm10
1671 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm11[1]
1672 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm1
1673 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1674 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm9
1675 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm1
1676 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1677 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1678 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm8
1679 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm7
1680 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm8[1]
1681 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm1
1682 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1683 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm6
1684 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
1685 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1686 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1687 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm5
1688 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm4
1689 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm5[1]
1690 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm1
1691 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1692 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm3
1693 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
1694 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1695 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1696 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm2
1697 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm1
1698 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
1699 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm14
1700 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm0[0,1,2,3],ymm14[4,5,6,7]
1701 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm0
1702 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm13
1703 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1,2,3,4,5],ymm13[6,7]
1704 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1705 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1706 ; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
1707 ; AVX2-ONLY-NEXT: # xmm13 = xmm13[0],mem[0]
1708 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm13, %ymm13
1709 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 16-byte Folded Reload
1710 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
1711 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1712 ; AVX2-ONLY-NEXT: vunpcklpd (%rsp), %xmm15, %xmm13 # 16-byte Folded Reload
1713 ; AVX2-ONLY-NEXT: # xmm13 = xmm15[0],mem[0]
1714 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm13, %ymm13
1715 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 16-byte Folded Reload
1716 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
1717 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1718 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1719 ; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
1720 ; AVX2-ONLY-NEXT: # xmm13 = xmm13[0],mem[0]
1721 ; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm13, %ymm13
1722 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 16-byte Folded Reload
1723 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
1724 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1725 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1726 ; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
1727 ; AVX2-ONLY-NEXT: # xmm13 = xmm13[0],mem[0]
1728 ; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm13, %ymm13
1729 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm12, %ymm12
1730 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
1731 ; AVX2-ONLY-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1732 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm10 = xmm10[0],xmm11[0]
1733 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm10, %ymm10
1734 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm9, %ymm9
1735 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
1736 ; AVX2-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1737 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm7[0],xmm8[0]
1738 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm7, %ymm7
1739 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm6, %ymm6
1740 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
1741 ; AVX2-ONLY-NEXT: vmovups %ymm6, (%rsp) # 32-byte Spill
1742 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
1743 ; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm4, %ymm4
1744 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm3, %ymm3
1745 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
1746 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1747 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1748 ; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm1, %ymm1
1749 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm0, %ymm0
1750 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1751 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1752 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
1753 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
1754 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
1755 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
1756 ; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
1757 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
1758 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1759 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm2
1760 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm3
1761 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
1762 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
1763 ; AVX2-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm6
1764 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm6[6,7]
1765 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1766 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
1767 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
1768 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm0[2,3],ymm1[2,3]
1769 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
1770 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm2
1771 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm0[2,3],ymm2[2,3]
1772 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm6
1773 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm7
1774 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm6[0],ymm7[0],ymm6[2],ymm7[2]
1775 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
1776 ; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
1777 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm3[6,7]
1778 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1779 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm8
1780 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm9
1781 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm8[0],ymm9[0],ymm8[2],ymm9[2]
1782 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
1783 ; AVX2-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm10
1784 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3,4,5],ymm10[6,7]
1785 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
1786 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm7
1787 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm6[2,3],ymm7[2,3]
1788 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm8[1],ymm9[1],ymm8[3],ymm9[3]
1789 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm8
1790 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm6[2,3],ymm8[2,3]
1791 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm10
1792 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm11
1793 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
1794 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm6[2,3],mem[2,3]
1795 ; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm9
1796 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm9[6,7]
1797 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm1
1798 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm0
1799 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
1800 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],mem[2,3]
1801 ; AVX2-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm14
1802 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm14[6,7]
1803 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
1804 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm11
1805 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm10[2,3],ymm11[2,3]
1806 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
1807 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
1808 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm0[2,3],ymm1[2,3]
1809 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm1
1810 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm11
1811 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm11[0],ymm1[2],ymm11[2]
1812 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],mem[2,3]
1813 ; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm15
1814 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
1815 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm15
1816 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm0
1817 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm15[0],ymm0[0],ymm15[2],ymm0[2]
1818 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm13[2,3],mem[2,3]
1819 ; AVX2-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm12
1820 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
1821 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm11[1],ymm1[3],ymm11[3]
1822 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm11
1823 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm11[2,3]
1824 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm15[1],ymm0[1],ymm15[3],ymm0[3]
1825 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm11
1826 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm11[2,3]
1827 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
1828 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
1829 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
1830 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],mem[6,7]
1831 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
1832 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
1833 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
1834 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
1835 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
1836 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rdx)
1837 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 960(%rdx)
1838 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 928(%rdx)
1839 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 896(%rdx)
1840 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 736(%rdx)
1841 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 704(%rdx)
1842 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 672(%rdx)
1843 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 640(%rdx)
1844 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 480(%rdx)
1845 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 448(%rdx)
1846 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 416(%rdx)
1847 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1848 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
1849 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 224(%rdx)
1850 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 192(%rdx)
1851 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1852 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
1853 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1854 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
1855 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1856 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
1857 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1858 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
1859 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1860 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
1861 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1862 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
1863 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1864 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
1865 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1866 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
1867 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1868 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
1869 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1870 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
1871 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1872 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
1873 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1874 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
1875 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1876 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
1877 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1878 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
1879 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1880 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
1881 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1882 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
1883 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1884 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
1885 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1886 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rdx)
1887 ; AVX2-ONLY-NEXT: addq $488, %rsp # imm = 0x1E8
1888 ; AVX2-ONLY-NEXT: vzeroupper
1889 ; AVX2-ONLY-NEXT: retq
1890 ;
1891 ; AVX512F-LABEL: store_i64_stride8_vf16:
1892 ; AVX512F: # %bb.0:
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
; AVX512F-NEXT: vmovdqa64 64(%rdi), %zmm11
; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm3
; AVX512F-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm19
; AVX512F-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm0
; AVX512F-NEXT: vmovdqa64 64(%rcx), %zmm26
; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm17
; AVX512F-NEXT: vmovdqa64 (%r8), %zmm6
; AVX512F-NEXT: vmovdqa64 64(%r8), %zmm16
; AVX512F-NEXT: vmovdqa64 (%r9), %zmm31
; AVX512F-NEXT: vmovdqa64 64(%r9), %zmm27
; AVX512F-NEXT: vmovdqa64 (%r11), %zmm8
; AVX512F-NEXT: vmovdqa64 64(%r11), %zmm30
; AVX512F-NEXT: vmovdqa64 (%r10), %zmm9
; AVX512F-NEXT: vmovdqa64 64(%r10), %zmm29
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [1,9,1,9,1,9,1,9]
; AVX512F-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512F-NEXT: vpermt2q %zmm9, %zmm18, %zmm1
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512F-NEXT: vpermt2q %zmm31, %zmm18, %zmm4
; AVX512F-NEXT: movb $-64, %r8b
; AVX512F-NEXT: kmovw %r8d, %k1
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512F-NEXT: vmovdqa (%rsi), %xmm1
; AVX512F-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512F-NEXT: vmovdqa (%rdi), %xmm10
; AVX512F-NEXT: vinserti128 $1, (%rdx), %ymm10, %ymm10
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm10[1],ymm1[1],ymm10[3],ymm1[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm13, %zmm4, %zmm2
; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [0,8,0,8,0,8,0,8]
; AVX512F-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512F-NEXT: vpermt2q %zmm9, %zmm23, %zmm4
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm13
; AVX512F-NEXT: vpermt2q %zmm31, %zmm23, %zmm13
; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm13 {%k1}
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm10[0],ymm1[0],ymm10[2],ymm1[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm1, %zmm13, %zmm21
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [5,13,5,13,5,13,5,13]
; AVX512F-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512F-NEXT: vpermt2q %zmm9, %zmm10, %zmm1
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm4 = zmm6[1],zmm31[1],zmm6[3],zmm31[3],zmm6[5],zmm31[5],zmm6[7],zmm31[7]
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512F-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [5,13,5,13]
; AVX512F-NEXT: # ymm14 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm13
; AVX512F-NEXT: vpermt2q %zmm17, %zmm14, %zmm13
; AVX512F-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm13[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm1, %zmm4, %zmm22
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [4,12,4,12,4,12,4,12]
; AVX512F-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512F-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm6[0],zmm31[0],zmm6[2],zmm31[2],zmm6[4],zmm31[4],zmm6[6],zmm31[6]
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm13
; AVX512F-NEXT: vpermt2q %zmm19, %zmm15, %zmm13
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [4,12,4,12]
; AVX512F-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512F-NEXT: vpermt2q %zmm17, %zmm1, %zmm5
; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm4, %zmm24
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [7,15,7,15,7,15,7,15]
; AVX512F-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm5
; AVX512F-NEXT: vpermt2q %zmm31, %zmm13, %zmm5
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm8[1],zmm9[1],zmm8[3],zmm9[3],zmm8[5],zmm9[5],zmm8[7],zmm9[7]
; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512F-NEXT: vpermt2q %zmm19, %zmm13, %zmm2
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [7,15,7,15]
; AVX512F-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm7
; AVX512F-NEXT: vpermt2q %zmm17, %zmm4, %zmm7
; AVX512F-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm20
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [6,14,6,14,6,14,6,14]
; AVX512F-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm2
; AVX512F-NEXT: vpermt2q %zmm31, %zmm28, %zmm2
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k1} = zmm8[0],zmm9[0],zmm8[2],zmm9[2],zmm8[4],zmm9[4],zmm8[6],zmm9[6]
; AVX512F-NEXT: vpermt2q %zmm19, %zmm28, %zmm3
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
; AVX512F-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512F-NEXT: vpermt2q %zmm17, %zmm7, %zmm0
; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm17
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512F-NEXT: vpermt2q %zmm29, %zmm18, %zmm0
; AVX512F-NEXT: vpermi2q %zmm27, %zmm16, %zmm18
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512F-NEXT: vmovdqa 64(%rsi), %xmm0
; AVX512F-NEXT: vinserti128 $1, 64(%rcx), %ymm0, %ymm0
; AVX512F-NEXT: vmovdqa 64(%rdi), %xmm2
; AVX512F-NEXT: vinserti128 $1, 64(%rdx), %ymm2, %ymm2
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm3, %zmm18, %zmm18
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm3
; AVX512F-NEXT: vpermt2q %zmm29, %zmm23, %zmm3
; AVX512F-NEXT: vpermi2q %zmm27, %zmm16, %zmm23
; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm23 {%k1}
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm23, %zmm19
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512F-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm2 = zmm16[1],zmm27[1],zmm16[3],zmm27[3],zmm16[5],zmm27[5],zmm16[7],zmm27[7]
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512F-NEXT: vpermi2q %zmm25, %zmm11, %zmm10
; AVX512F-NEXT: vpermi2q %zmm26, %zmm12, %zmm14
; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm14[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm2
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512F-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm3 = zmm16[0],zmm27[0],zmm16[2],zmm27[2],zmm16[4],zmm27[4],zmm16[6],zmm27[6]
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
; AVX512F-NEXT: vpermi2q %zmm25, %zmm11, %zmm15
; AVX512F-NEXT: vpermi2q %zmm26, %zmm12, %zmm1
; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm1[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm1
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-NEXT: vpermt2q %zmm27, %zmm13, %zmm0
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm0 {%k1} = zmm30[1],zmm29[1],zmm30[3],zmm29[3],zmm30[5],zmm29[5],zmm30[7],zmm29[7]
; AVX512F-NEXT: vpermi2q %zmm25, %zmm11, %zmm13
; AVX512F-NEXT: vpermi2q %zmm26, %zmm12, %zmm4
; AVX512F-NEXT: vpblendd {{.*#+}} ymm3 = ymm13[0,1,2,3],ymm4[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm4
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-NEXT: vpermt2q %zmm27, %zmm28, %zmm0
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm30[0],zmm29[0],zmm30[2],zmm29[2],zmm30[4],zmm29[4],zmm30[6],zmm29[6]
; AVX512F-NEXT: vpermt2q %zmm25, %zmm28, %zmm11
; AVX512F-NEXT: vpermt2q %zmm26, %zmm7, %zmm12
; AVX512F-NEXT: vpblendd {{.*#+}} ymm3 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [3,11,3,11,3,11,3,11]
; AVX512F-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm7
; AVX512F-NEXT: vpermt2q %zmm9, %zmm3, %zmm7
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm10
; AVX512F-NEXT: vpermt2q %zmm31, %zmm3, %zmm10
; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm10 {%k1}
; AVX512F-NEXT: vmovdqa (%rcx), %ymm7
; AVX512F-NEXT: vmovdqa 64(%rcx), %ymm11
; AVX512F-NEXT: vmovdqa (%rdx), %ymm12
; AVX512F-NEXT: vmovdqa 64(%rdx), %ymm13
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm14 = ymm12[1],ymm7[1],ymm12[3],ymm7[3]
; AVX512F-NEXT: vmovdqa (%rsi), %ymm15
; AVX512F-NEXT: vmovdqa64 64(%rsi), %ymm23
; AVX512F-NEXT: vmovdqa64 (%rdi), %ymm25
; AVX512F-NEXT: vmovdqa64 64(%rdi), %ymm26
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm5 = ymm25[1],ymm15[1],ymm25[3],ymm15[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm5 = ymm5[2,3],ymm14[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm5
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [2,10,2,10,2,10,2,10]
; AVX512F-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermt2q %zmm9, %zmm10, %zmm8
; AVX512F-NEXT: vpermt2q %zmm31, %zmm10, %zmm6
; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm6 {%k1}
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm12[0],ymm7[0],ymm12[2],ymm7[2]
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm25[0],ymm15[0],ymm25[2],ymm15[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm7, %zmm6, %zmm6
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm7
; AVX512F-NEXT: vpermt2q %zmm29, %zmm3, %zmm7
; AVX512F-NEXT: vpermi2q %zmm27, %zmm16, %zmm3
; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm3 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm13[1],ymm11[1],ymm13[3],ymm11[3]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm8 = ymm26[1],ymm23[1],ymm26[3],ymm23[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm7, %zmm3, %zmm3
; AVX512F-NEXT: vpermt2q %zmm29, %zmm10, %zmm30
; AVX512F-NEXT: vpermt2q %zmm27, %zmm10, %zmm16
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm16 {%k1}
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm13[0],ymm11[0],ymm13[2],ymm11[2]
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm26[0],ymm23[0],ymm26[2],ymm23[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm7, %zmm16, %zmm7
; AVX512F-NEXT: vmovdqa64 %zmm7, 640(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm3, 704(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm5, 192(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm0, 896(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm4, 960(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm1, 768(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm2, 832(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm19, 512(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm18, 576(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm17, 384(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm20, 448(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm24, 256(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm22, 320(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm21, (%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride8_vf16:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm11
; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm3
; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm25
; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm19
; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm0
; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm26
; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm17
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm6
; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm16
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm31
; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm27
; AVX512BW-NEXT: vmovdqa64 (%r11), %zmm8
; AVX512BW-NEXT: vmovdqa64 64(%r11), %zmm30
; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm9
; AVX512BW-NEXT: vmovdqa64 64(%r10), %zmm29
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [1,9,1,9,1,9,1,9]
; AVX512BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm18, %zmm1
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm18, %zmm4
; AVX512BW-NEXT: movb $-64, %r8b
; AVX512BW-NEXT: kmovd %r8d, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm10
; AVX512BW-NEXT: vinserti128 $1, (%rdx), %ymm10, %ymm10
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm10[1],ymm1[1],ymm10[3],ymm1[3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm13, %zmm4, %zmm2
; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [0,8,0,8,0,8,0,8]
; AVX512BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm23, %zmm4
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm13
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm23, %zmm13
; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm13 {%k1}
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm10[0],ymm1[0],ymm10[2],ymm1[2]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm13, %zmm21
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [5,13,5,13,5,13,5,13]
; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm10, %zmm1
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm4 = zmm6[1],zmm31[1],zmm6[3],zmm31[3],zmm6[5],zmm31[5],zmm6[7],zmm31[7]
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [5,13,5,13]
; AVX512BW-NEXT: # ymm14 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm13
; AVX512BW-NEXT: vpermt2q %zmm17, %zmm14, %zmm13
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm13[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm4, %zmm22
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [4,12,4,12,4,12,4,12]
; AVX512BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm1
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm6[0],zmm31[0],zmm6[2],zmm31[2],zmm6[4],zmm31[4],zmm6[6],zmm31[6]
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm13
; AVX512BW-NEXT: vpermt2q %zmm19, %zmm15, %zmm13
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [4,12,4,12]
; AVX512BW-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm17, %zmm1, %zmm5
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm4, %zmm24
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm13, %zmm5
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm8[1],zmm9[1],zmm8[3],zmm9[3],zmm8[5],zmm9[5],zmm8[7],zmm9[7]
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512BW-NEXT: vpermt2q %zmm19, %zmm13, %zmm2
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [7,15,7,15]
; AVX512BW-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm17, %zmm4, %zmm7
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm7[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm5, %zmm20
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm2
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm28, %zmm2
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k1} = zmm8[0],zmm9[0],zmm8[2],zmm9[2],zmm8[4],zmm9[4],zmm8[6],zmm9[6]
; AVX512BW-NEXT: vpermt2q %zmm19, %zmm28, %zmm3
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
; AVX512BW-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512BW-NEXT: vpermt2q %zmm17, %zmm7, %zmm0
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm17
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm18, %zmm0
; AVX512BW-NEXT: vpermi2q %zmm27, %zmm16, %zmm18
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512BW-NEXT: vmovdqa 64(%rsi), %xmm0
; AVX512BW-NEXT: vinserti128 $1, 64(%rcx), %ymm0, %ymm0
; AVX512BW-NEXT: vmovdqa 64(%rdi), %xmm2
; AVX512BW-NEXT: vinserti128 $1, 64(%rdx), %ymm2, %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm3, %zmm18, %zmm18
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm23, %zmm3
; AVX512BW-NEXT: vpermi2q %zmm27, %zmm16, %zmm23
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm23 {%k1}
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[2],ymm0[2]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm23, %zmm19
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm2 = zmm16[1],zmm27[1],zmm16[3],zmm27[3],zmm16[5],zmm27[5],zmm16[7],zmm27[7]
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512BW-NEXT: vpermi2q %zmm25, %zmm11, %zmm10
; AVX512BW-NEXT: vpermi2q %zmm26, %zmm12, %zmm14
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm14[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm2
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm15, %zmm0
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm3 = zmm16[0],zmm27[0],zmm16[2],zmm27[2],zmm16[4],zmm27[4],zmm16[6],zmm27[6]
; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
; AVX512BW-NEXT: vpermi2q %zmm25, %zmm11, %zmm15
; AVX512BW-NEXT: vpermi2q %zmm26, %zmm12, %zmm1
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0,1,2,3],ymm1[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm1
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm13, %zmm0
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm0 {%k1} = zmm30[1],zmm29[1],zmm30[3],zmm29[3],zmm30[5],zmm29[5],zmm30[7],zmm29[7]
; AVX512BW-NEXT: vpermi2q %zmm25, %zmm11, %zmm13
; AVX512BW-NEXT: vpermi2q %zmm26, %zmm12, %zmm4
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm3 = ymm13[0,1,2,3],ymm4[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm4
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm28, %zmm0
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm30[0],zmm29[0],zmm30[2],zmm29[2],zmm30[4],zmm29[4],zmm30[6],zmm29[6]
; AVX512BW-NEXT: vpermt2q %zmm25, %zmm28, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm26, %zmm7, %zmm12
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm3 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [3,11,3,11,3,11,3,11]
; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm3, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm3, %zmm10
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm10 {%k1}
; AVX512BW-NEXT: vmovdqa (%rcx), %ymm7
; AVX512BW-NEXT: vmovdqa 64(%rcx), %ymm11
; AVX512BW-NEXT: vmovdqa (%rdx), %ymm12
; AVX512BW-NEXT: vmovdqa 64(%rdx), %ymm13
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm14 = ymm12[1],ymm7[1],ymm12[3],ymm7[3]
; AVX512BW-NEXT: vmovdqa (%rsi), %ymm15
; AVX512BW-NEXT: vmovdqa64 64(%rsi), %ymm23
; AVX512BW-NEXT: vmovdqa64 (%rdi), %ymm25
; AVX512BW-NEXT: vmovdqa64 64(%rdi), %ymm26
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm5 = ymm25[1],ymm15[1],ymm25[3],ymm15[3]
; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm5 = ymm5[2,3],ymm14[2,3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm5
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [2,10,2,10,2,10,2,10]
; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermt2q %zmm9, %zmm10, %zmm8
; AVX512BW-NEXT: vpermt2q %zmm31, %zmm10, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm6 {%k1}
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm12[0],ymm7[0],ymm12[2],ymm7[2]
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm25[0],ymm15[0],ymm25[2],ymm15[2]
; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm7, %zmm6, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm3, %zmm7
; AVX512BW-NEXT: vpermi2q %zmm27, %zmm16, %zmm3
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm3 {%k1}
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm13[1],ymm11[1],ymm13[3],ymm11[3]
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm8 = ymm26[1],ymm23[1],ymm26[3],ymm23[3]
; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm7, %zmm3, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm29, %zmm10, %zmm30
; AVX512BW-NEXT: vpermt2q %zmm27, %zmm10, %zmm16
; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm16 {%k1}
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm13[0],ymm11[0],ymm13[2],ymm11[2]
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm26[0],ymm23[0],ymm26[2],ymm23[2]
; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm7, %zmm16, %zmm7
; AVX512BW-NEXT: vmovdqa64 %zmm7, 640(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm3, 704(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm5, 192(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm0, 896(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm4, 960(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm1, 768(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm2, 832(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm19, 512(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm18, 576(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm17, 384(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm20, 448(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm24, 256(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm22, 320(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm21, (%rax)
; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
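; The IR below builds the stride-8 interleave: the eight <16 x i64> inputs
; are concatenated pairwise into a single <128 x i64> value, and the final
; shuffle mask picks element i of every source in turn (0, 16, 32, ..., 112,
; then 1, 17, ...), producing the a0,b0,...,h0, a1,b1,...,h1 store layout.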
  %in.vec0 = load <16 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <16 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <16 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <16 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <16 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <16 x i64>, ptr %in.vecptr5, align 64
  %in.vec6 = load <16 x i64>, ptr %in.vecptr6, align 64
  %in.vec7 = load <16 x i64>, ptr %in.vecptr7, align 64
  %1 = shufflevector <16 x i64> %in.vec0, <16 x i64> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %2 = shufflevector <16 x i64> %in.vec2, <16 x i64> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %3 = shufflevector <16 x i64> %in.vec4, <16 x i64> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %4 = shufflevector <16 x i64> %in.vec6, <16 x i64> %in.vec7, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %5 = shufflevector <32 x i64> %1, <32 x i64> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %6 = shufflevector <32 x i64> %3, <32 x i64> %4, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %7 = shufflevector <64 x i64> %5, <64 x i64> %6, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %interleaved.vec = shufflevector <128 x i64> %7, <128 x i64> poison, <128 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 112, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 113, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 114, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 115, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 116, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 117, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 118, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 119, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 120, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 121, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 122, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 123, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 124, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 125, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 126, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111, i32 127>
  store <128 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
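
; vf32 repeats the same stride-8 pattern with <32 x i64> per input vector
; (a <256 x i64> interleaved result), so each prefix block below processes
; twice as much data as the vf16 version above.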
define void @store_i64_stride8_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride8_vf32:
; SSE: # %bb.0:
; SSE-NEXT: subq $1688, %rsp # imm = 0x698
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps 16(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm9
; SSE-NEXT: movaps 16(%rdx), %xmm10
; SSE-NEXT: movaps (%rcx), %xmm3
; SSE-NEXT: movaps 16(%rcx), %xmm2
; SSE-NEXT: movaps (%r8), %xmm11
; SSE-NEXT: movaps 16(%r8), %xmm12
; SSE-NEXT: movaps (%r9), %xmm5
; SSE-NEXT: movaps 16(%r9), %xmm4
; SSE-NEXT: movaps (%r10), %xmm13
; SSE-NEXT: movaps 16(%r10), %xmm14
; SSE-NEXT: movaps (%rax), %xmm6
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm1[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm1[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm3[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm5[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm6[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm2[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm12, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm4[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 16(%rax), %xmm0
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rdi), %xmm2
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rdx), %xmm2
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r10), %xmm2
; SSE-NEXT: movaps 32(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r10), %xmm2
; SSE-NEXT: movaps 48(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r10), %xmm2
; SSE-NEXT: movaps 64(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r10), %xmm2
; SSE-NEXT: movaps 80(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r10), %xmm2
; SSE-NEXT: movaps 96(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdi), %xmm2
; SSE-NEXT: movaps 112(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdx), %xmm2
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r8), %xmm2
; SSE-NEXT: movaps 112(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r10), %xmm2
; SSE-NEXT: movaps 112(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdi), %xmm2
; SSE-NEXT: movaps 128(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdx), %xmm2
; SSE-NEXT: movaps 128(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r8), %xmm2
; SSE-NEXT: movaps 128(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r10), %xmm2
; SSE-NEXT: movaps 128(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdi), %xmm2
; SSE-NEXT: movaps 144(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdx), %xmm2
; SSE-NEXT: movaps 144(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r8), %xmm2
; SSE-NEXT: movaps 144(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r10), %xmm2
; SSE-NEXT: movaps 144(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdi), %xmm2
; SSE-NEXT: movaps 160(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdx), %xmm2
; SSE-NEXT: movaps 160(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r8), %xmm2
; SSE-NEXT: movaps 160(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r10), %xmm2
; SSE-NEXT: movaps 160(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdi), %xmm2
; SSE-NEXT: movaps 176(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdx), %xmm2
; SSE-NEXT: movaps 176(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r8), %xmm2
; SSE-NEXT: movaps 176(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r10), %xmm2
; SSE-NEXT: movaps 176(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdi), %xmm2
; SSE-NEXT: movaps 192(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdx), %xmm2
; SSE-NEXT: movaps 192(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r8), %xmm2
; SSE-NEXT: movaps 192(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r10), %xmm2
; SSE-NEXT: movaps 192(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdi), %xmm2
; SSE-NEXT: movaps 208(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdx), %xmm2
; SSE-NEXT: movaps 208(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%r8), %xmm2
; SSE-NEXT: movaps 208(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%r10), %xmm2
; SSE-NEXT: movaps 208(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 224(%rdi), %xmm14
; SSE-NEXT: movaps 224(%rsi), %xmm0
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
; SSE-NEXT: movaps 224(%rdx), %xmm10
; SSE-NEXT: movaps 224(%rcx), %xmm0
; SSE-NEXT: movaps %xmm10, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
; SSE-NEXT: movaps 224(%r8), %xmm12
; SSE-NEXT: movaps 224(%r9), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps 224(%r10), %xmm8
; SSE-NEXT: movaps 224(%rax), %xmm0
; SSE-NEXT: movaps %xmm8, %xmm11
; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
; SSE-NEXT: movaps 240(%rdi), %xmm5
; SSE-NEXT: movaps 240(%rsi), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm9
; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 240(%rdx), %xmm6
; SSE-NEXT: movaps 240(%rcx), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 240(%r8), %xmm1
; SSE-NEXT: movaps 240(%r9), %xmm2
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; SSE-NEXT: movaps 240(%r10), %xmm2
; SSE-NEXT: movaps 240(%rax), %xmm3
; SSE-NEXT: movaps %xmm2, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm2, 2032(%rax)
; SSE-NEXT: movaps %xmm1, 2016(%rax)
; SSE-NEXT: movaps %xmm6, 2000(%rax)
; SSE-NEXT: movaps %xmm5, 1984(%rax)
; SSE-NEXT: movaps %xmm0, 1968(%rax)
; SSE-NEXT: movaps %xmm4, 1952(%rax)
; SSE-NEXT: movaps %xmm7, 1936(%rax)
; SSE-NEXT: movaps %xmm9, 1920(%rax)
; SSE-NEXT: movaps %xmm8, 1904(%rax)
; SSE-NEXT: movaps %xmm12, 1888(%rax)
; SSE-NEXT: movaps %xmm10, 1872(%rax)
; SSE-NEXT: movaps %xmm14, 1856(%rax)
; SSE-NEXT: movaps %xmm11, 1840(%rax)
; SSE-NEXT: movaps %xmm13, 1824(%rax)
; SSE-NEXT: movaps %xmm15, 1808(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1792(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1776(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1760(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1744(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1728(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1712(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1696(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1680(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1664(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1648(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1632(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1616(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1600(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1584(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1568(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1552(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1536(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1520(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1504(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1488(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1472(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1456(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1440(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1424(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1408(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1392(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1376(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1360(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1344(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1328(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1312(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1296(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1280(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1264(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1248(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1232(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1216(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1200(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1184(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1168(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1152(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1136(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1120(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1104(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1088(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1072(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1056(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1040(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1024(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1008(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 992(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 976(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 960(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 944(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 928(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 912(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 896(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 880(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 864(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 848(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 832(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 816(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 800(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 784(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 768(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 752(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 736(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 720(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 704(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 688(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 672(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 656(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 640(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 624(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 608(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 592(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 576(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 560(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 544(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 528(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 512(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 496(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 480(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 464(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 448(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 432(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 416(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 400(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 384(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 368(%rax)
2959 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2960 ; SSE-NEXT: movaps %xmm0, 352(%rax)
2961 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2962 ; SSE-NEXT: movaps %xmm0, 336(%rax)
2963 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2964 ; SSE-NEXT: movaps %xmm0, 320(%rax)
2965 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2966 ; SSE-NEXT: movaps %xmm0, 304(%rax)
2967 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2968 ; SSE-NEXT: movaps %xmm0, 288(%rax)
2969 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2970 ; SSE-NEXT: movaps %xmm0, 272(%rax)
2971 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2972 ; SSE-NEXT: movaps %xmm0, 256(%rax)
2973 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2974 ; SSE-NEXT: movaps %xmm0, 240(%rax)
2975 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2976 ; SSE-NEXT: movaps %xmm0, 224(%rax)
2977 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2978 ; SSE-NEXT: movaps %xmm0, 208(%rax)
2979 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2980 ; SSE-NEXT: movaps %xmm0, 192(%rax)
2981 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2982 ; SSE-NEXT: movaps %xmm0, 176(%rax)
2983 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2984 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2985 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2986 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2987 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2988 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2989 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2990 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2991 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2992 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2993 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2994 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2995 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2996 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2997 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2998 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2999 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3000 ; SSE-NEXT: movaps %xmm0, 32(%rax)
3001 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3002 ; SSE-NEXT: movaps %xmm0, 16(%rax)
3003 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3004 ; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $1688, %rsp # imm = 0x698
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride8_vf32:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: subq $1672, %rsp # imm = 0x688
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm5
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm1
; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm0
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm6
; AVX1-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm4, %ymm4
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2],ymm6[2]
; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm4
; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm6
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm6[0],xmm4[0]
; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm8
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm9
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm7, %ymm7
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm7 = ymm7[0],ymm9[1],ymm7[2],ymm9[2]
; AVX1-ONLY-NEXT: vmovupd %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm2, %ymm3
; AVX1-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm5[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm2, %ymm3
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm3
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm5
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm5[1],ymm2[2],ymm5[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm5
; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm6
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm6[0],xmm5[0]
; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm8
; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm2
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm9
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm7, %ymm7
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm7 = ymm7[0],ymm9[1],ymm7[2],ymm9[2]
; AVX1-ONLY-NEXT: vmovupd %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm6[1],xmm5[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm3
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm5
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm4, %ymm4
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2],ymm5[2]
; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm4
; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 128(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 128(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 136(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 136(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rax), %xmm2
; AVX1-ONLY-NEXT: vmovaps 160(%r9), %xmm4
; AVX1-ONLY-NEXT: vmovaps 160(%r8), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 168(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 168(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rax), %xmm2
; AVX1-ONLY-NEXT: vmovaps 192(%r9), %xmm4
; AVX1-ONLY-NEXT: vmovaps 192(%r8), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 200(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 200(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rax), %xmm2
; AVX1-ONLY-NEXT: vmovaps 224(%r9), %xmm4
; AVX1-ONLY-NEXT: vmovaps 224(%r8), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 232(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 232(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 16(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 16(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 48(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 48(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 80(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 80(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 112(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 112(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 112(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 144(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 144(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 144(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 144(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 152(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 152(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 176(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 176(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 176(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 176(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 184(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 184(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 208(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 208(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 208(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 208(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 216(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 216(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 240(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 240(%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm1[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovaps 240(%r9), %xmm5
; AVX1-ONLY-NEXT: vmovaps 240(%r8), %xmm6
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm15 = xmm6[0],xmm5[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 240(%rax), %ymm14
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm15[0,1,2,3,4,5],ymm14[6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 248(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm5[1]
; AVX1-ONLY-NEXT: vbroadcastsd 248(%r10), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2016(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm1, 1984(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm4, 1952(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm7, 1920(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1888(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1856(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1824(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1792(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm8, 1760(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm10, 1728(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1696(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1664(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1632(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1600(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1568(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1536(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm2, 1504(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm3, 1472(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1344(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm5, 1248(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm6, 1216(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1152(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm9, 992(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm11, 960(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm12, 736(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm13, 704(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm14, 480(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm15, 448(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rdx)
; AVX1-ONLY-NEXT: addq $1672, %rsp # imm = 0x688
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
3546 ; AVX2-ONLY-LABEL: store_i64_stride8_vf32:
3547 ; AVX2-ONLY: # %bb.0:
3548 ; AVX2-ONLY-NEXT: subq $1704, %rsp # imm = 0x6A8
3549 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
3550 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3551 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm0
3552 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3553 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm3
3554 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3555 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3556 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm2
3557 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3558 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
3559 ; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3560 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm1
3561 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3562 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
3563 ; AVX2-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3564 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
3565 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm2
3566 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3567 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
3568 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3569 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm0
3570 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3571 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3572 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm2
3573 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3574 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm6
3575 ; AVX2-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3576 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm1
3577 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3578 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm7
3579 ; AVX2-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3580 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
3581 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm2
3582 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3583 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
3584 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3585 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
3586 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm5[1],xmm4[1]
3587 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm2
3588 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3589 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
3590 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3591 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
3592 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm1
3593 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3594 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm1
3595 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3596 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3597 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3598 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3599 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
3600 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3601 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm0
3602 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3603 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3604 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm1
3605 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3606 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm1
3607 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3608 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3609 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3610 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3611 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm1
3612 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3613 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm0
3614 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3615 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3616 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm1
3617 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3618 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm1
3619 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3620 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3621 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3622 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3623 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm1
3624 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3625 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm0
3626 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3627 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3628 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm1
3629 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3630 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm1
3631 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3632 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3633 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3634 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3635 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm1
3636 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3637 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm0
3638 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3639 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3640 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm1
3641 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3642 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm1
3643 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3644 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3645 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3646 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3647 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm1
3648 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3649 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm0
3650 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3651 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3652 ; AVX2-ONLY-NEXT: vbroadcastsd 136(%rdx), %ymm1
3653 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3654 ; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %xmm1
3655 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3656 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3657 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3658 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3659 ; AVX2-ONLY-NEXT: vmovaps 128(%r9), %xmm1
3660 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3661 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %xmm0
3662 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3663 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3664 ; AVX2-ONLY-NEXT: vbroadcastsd 136(%r10), %ymm1
3665 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3666 ; AVX2-ONLY-NEXT: vmovaps 128(%rax), %xmm1
3667 ; AVX2-ONLY-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
3668 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3669 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3670 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3671 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %xmm1
3672 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3673 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm0
3674 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3675 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
3676 ; AVX2-ONLY-NEXT: vbroadcastsd 168(%rdx), %ymm1
3677 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3678 ; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %xmm1
3679 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3680 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3681 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3682 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3683 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %xmm0
3684 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3685 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %xmm13
3686 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm0[1]
3687 ; AVX2-ONLY-NEXT: vbroadcastsd 168(%r10), %ymm1
3688 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3689 ; AVX2-ONLY-NEXT: vmovaps 160(%rax), %xmm12
3690 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm1
3691 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3692 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3693 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm11
3694 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm10
3695 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm11[1]
3696 ; AVX2-ONLY-NEXT: vbroadcastsd 200(%rdx), %ymm1
3697 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3698 ; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %xmm9
3699 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm1
3700 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3701 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3702 ; AVX2-ONLY-NEXT: vmovaps 192(%r9), %xmm8
3703 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %xmm7
3704 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm8[1]
3705 ; AVX2-ONLY-NEXT: vbroadcastsd 200(%r10), %ymm1
3706 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3707 ; AVX2-ONLY-NEXT: vmovaps 192(%rax), %xmm6
3708 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
3709 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3710 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3711 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %xmm5
3712 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm4
3713 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm5[1]
3714 ; AVX2-ONLY-NEXT: vbroadcastsd 232(%rdx), %ymm1
3715 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3716 ; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %xmm3
3717 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
3718 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3719 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3720 ; AVX2-ONLY-NEXT: vmovaps 224(%r9), %xmm2
3721 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %xmm1
3722 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
3723 ; AVX2-ONLY-NEXT: vbroadcastsd 232(%r10), %ymm15
3724 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm15[4,5,6,7]
3725 ; AVX2-ONLY-NEXT: vmovaps 224(%rax), %xmm0
3726 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm14
3727 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm15[0,1,2,3,4,5],ymm14[6,7]
3728 ; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3729 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3730 ; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
3731 ; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
3732 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm14, %ymm14
3733 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
3734 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
3735 ; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3736 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3737 ; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
3738 ; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
3739 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm14, %ymm14
3740 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
3741 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd (%rsp), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 160(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm13 = xmm13[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 160(%r10), %ymm13, %ymm13
; AVX2-ONLY-NEXT: vbroadcastsd %xmm12, %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm10 = xmm10[0],xmm11[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm10, %ymm10
; AVX2-ONLY-NEXT: vbroadcastsd %xmm9, %ymm9
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm7[0],xmm8[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%r10), %ymm7, %ymm7
; AVX2-ONLY-NEXT: vbroadcastsd %xmm6, %ymm6
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 224(%rdx), %ymm4, %ymm4
; AVX2-ONLY-NEXT: vbroadcastsd %xmm3, %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 224(%r10), %ymm1, %ymm1
; AVX2-ONLY-NEXT: vbroadcastsd %xmm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 128(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 152(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 152(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 160(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 176(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 184(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 184(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 192(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 208(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 216(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 216(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm1[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm5
; AVX2-ONLY-NEXT: vmovaps 224(%r9), %ymm6
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm5[0],ymm6[0],ymm5[2],ymm6[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 240(%rax), %ymm15
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm7[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm3[1],ymm0[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 248(%rdx), %ymm3
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm0[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm5[1],ymm6[1],ymm5[3],ymm6[3]
; AVX2-ONLY-NEXT: vbroadcastsd 248(%r10), %ymm3
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm1[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovaps %ymm2, 2016(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm4, 1984(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm15, 1952(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm10, 1920(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm11, 1760(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm13, 1728(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm2, 1696(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm2, 1664(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm1, 1472(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm3, 1248(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm5, 1216(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm6, 992(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm7, 960(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 928(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm8, 736(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm9, 704(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm12, 480(%rdx)
; AVX2-ONLY-NEXT: vmovaps %ymm14, 448(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1888(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1856(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1824(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1792(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1632(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1600(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1568(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1536(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1312(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1120(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1088(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rdx)
; AVX2-ONLY-NEXT: addq $1704, %rsp # imm = 0x6A8
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-LABEL: store_i64_stride8_vf32:
; AVX512F: # %bb.0:
; AVX512F-NEXT: subq $2632, %rsp # imm = 0xA48
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-NEXT: vmovaps 128(%rdi), %zmm0
; AVX512F-NEXT: vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 64(%rdi), %zmm1
; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512F-NEXT: vmovdqa64 128(%rsi), %zmm25
; AVX512F-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm0
; AVX512F-NEXT: vmovaps 192(%rdx), %zmm2
; AVX512F-NEXT: vmovups %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 128(%rdx), %zmm13
; AVX512F-NEXT: vmovdqa64 64(%rdx), %zmm2
; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm6
; AVX512F-NEXT: vmovdqa64 64(%rcx), %zmm21
; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm7
; AVX512F-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512F-NEXT: vmovdqa64 64(%r8), %zmm19
; AVX512F-NEXT: vmovdqa64 (%r9), %zmm18
; AVX512F-NEXT: vmovdqa64 64(%r9), %zmm28
; AVX512F-NEXT: vmovdqa64 (%r10), %zmm17
; AVX512F-NEXT: vmovdqa64 64(%r10), %zmm16
; AVX512F-NEXT: vmovdqa64 (%rax), %zmm24
; AVX512F-NEXT: vmovdqa64 64(%rax), %zmm22
; AVX512F-NEXT: movb $-64, %r11b
; AVX512F-NEXT: kmovw %r11d, %k1
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [4,12,4,12,4,12,4,12]
; AVX512F-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm9
; AVX512F-NEXT: vpermt2q %zmm24, %zmm15, %zmm9
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm23[0],zmm18[0],zmm23[2],zmm18[2],zmm23[4],zmm18[4],zmm23[6],zmm18[6]
; AVX512F-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm9
; AVX512F-NEXT: vpermt2q %zmm0, %zmm15, %zmm9
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [4,12,4,12]
; AVX512F-NEXT: # ymm12 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512F-NEXT: vpermt2q %zmm7, %zmm12, %zmm11
; AVX512F-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm9, %zmm10, %zmm9
; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [5,13,5,13,5,13,5,13]
; AVX512F-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm9
; AVX512F-NEXT: vpermt2q %zmm24, %zmm8, %zmm9
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm23[1],zmm18[1],zmm23[3],zmm18[3],zmm23[5],zmm18[5],zmm23[7],zmm18[7]
; AVX512F-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm9
; AVX512F-NEXT: vpermt2q %zmm0, %zmm8, %zmm9
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [5,13,5,13]
; AVX512F-NEXT: # ymm14 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512F-NEXT: vpermt2q %zmm7, %zmm14, %zmm11
; AVX512F-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm9, %zmm10, %zmm9
; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [6,14,6,14,6,14,6,14]
; AVX512F-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm9
; AVX512F-NEXT: vpermt2q %zmm18, %zmm3, %zmm9
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm17[0],zmm24[0],zmm17[2],zmm24[2],zmm17[4],zmm24[4],zmm17[6],zmm24[6]
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512F-NEXT: vpermt2q %zmm0, %zmm3, %zmm10
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,14,6,14]
; AVX512F-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512F-NEXT: vpermt2q %zmm7, %zmm4, %zmm11
; AVX512F-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm10, %zmm9, %zmm9
; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [7,15,7,15,7,15,7,15]
; AVX512F-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm9
; AVX512F-NEXT: vpermt2q %zmm18, %zmm29, %zmm9
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm9 {%k1} = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512F-NEXT: vpermt2q %zmm0, %zmm29, %zmm5
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [7,15,7,15]
; AVX512F-NEXT: # ymm0 = mem[0,1,0,1]
; AVX512F-NEXT: vpermt2q %zmm7, %zmm0, %zmm6
; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm9, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm5
; AVX512F-NEXT: vpermt2q %zmm22, %zmm15, %zmm5
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm6 = zmm19[0],zmm28[0],zmm19[2],zmm28[2],zmm19[4],zmm28[4],zmm19[6],zmm28[6]
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512F-NEXT: vpermt2q %zmm20, %zmm15, %zmm5
; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm7
; AVX512F-NEXT: vpermt2q %zmm21, %zmm12, %zmm7
; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm5
; AVX512F-NEXT: vpermt2q %zmm22, %zmm8, %zmm5
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm19[1],zmm28[1],zmm19[3],zmm28[3],zmm19[5],zmm28[5],zmm19[7],zmm28[7]
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512F-NEXT: vpermt2q %zmm20, %zmm8, %zmm5
; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm7
; AVX512F-NEXT: vpermt2q %zmm21, %zmm14, %zmm7
; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512F-NEXT: vpermt2q %zmm20, %zmm3, %zmm5
; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-NEXT: vpermt2q %zmm21, %zmm4, %zmm6
; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm6
; AVX512F-NEXT: vpermt2q %zmm28, %zmm3, %zmm6
; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm7
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm16[0],zmm22[0],zmm16[2],zmm22[2],zmm16[4],zmm22[4],zmm16[6],zmm22[6]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm3
; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 128(%rcx), %zmm5
; AVX512F-NEXT: vpermt2q %zmm20, %zmm29, %zmm1
; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm3
; AVX512F-NEXT: vpermt2q %zmm28, %zmm29, %zmm3
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm16[1],zmm22[1],zmm16[3],zmm22[3],zmm16[5],zmm22[5],zmm16[7],zmm22[7]
; AVX512F-NEXT: vpermt2q %zmm21, %zmm0, %zmm2
; AVX512F-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512F-NEXT: vpermt2q %zmm5, %zmm12, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512F-NEXT: vpermt2q %zmm5, %zmm14, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512F-NEXT: vpermt2q %zmm5, %zmm4, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm5, %zmm0, %zmm13
; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 192(%rcx), %zmm1
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-NEXT: vpermi2q %zmm1, %zmm13, %zmm12
; AVX512F-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermi2q %zmm1, %zmm13, %zmm14
; AVX512F-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermi2q %zmm1, %zmm13, %zmm4
; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm1, %zmm0, %zmm13
; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512F-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512F-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512F-NEXT: vpermt2q %zmm25, %zmm7, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm25, %zmm29, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 128(%r10), %zmm30
; AVX512F-NEXT: vmovdqa64 128(%rax), %zmm6
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512F-NEXT: vpermt2q %zmm6, %zmm15, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm0
; AVX512F-NEXT: vpermt2q %zmm6, %zmm8, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 128(%r8), %zmm21
; AVX512F-NEXT: vmovdqa64 128(%r9), %zmm9
; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512F-NEXT: vpermt2q %zmm9, %zmm7, %zmm0
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-NEXT: vmovdqa64 192(%r10), %zmm14
; AVX512F-NEXT: vmovdqa64 192(%rax), %zmm4
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm0
; AVX512F-NEXT: vpermt2q %zmm4, %zmm15, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 192(%rdi), %zmm1
; AVX512F-NEXT: vmovdqa64 192(%rsi), %zmm0
; AVX512F-NEXT: vpermi2q %zmm0, %zmm1, %zmm15
; AVX512F-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm2
; AVX512F-NEXT: vpermt2q %zmm4, %zmm8, %zmm2
; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 192(%r8), %zmm31
; AVX512F-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm27
; AVX512F-NEXT: vpermt2q %zmm3, %zmm7, %zmm27
; AVX512F-NEXT: vpermi2q %zmm0, %zmm1, %zmm7
; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm20
; AVX512F-NEXT: vpermt2q %zmm9, %zmm29, %zmm20
; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512F-NEXT: vpermt2q %zmm3, %zmm29, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [0,8,0,8,0,8,0,8]
; AVX512F-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm0
; AVX512F-NEXT: vpermt2q %zmm24, %zmm2, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [1,9,1,9,1,9,1,9]
; AVX512F-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm0
; AVX512F-NEXT: vpermt2q %zmm24, %zmm1, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [2,10,2,10,2,10,2,10]
; AVX512F-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm0
; AVX512F-NEXT: vpermt2q %zmm24, %zmm29, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
; AVX512F-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermt2q %zmm24, %zmm0, %zmm17
; AVX512F-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm13
; AVX512F-NEXT: vpermt2q %zmm18, %zmm2, %zmm13
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm12
; AVX512F-NEXT: vpermt2q %zmm18, %zmm1, %zmm12
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm24
; AVX512F-NEXT: vpermt2q %zmm18, %zmm29, %zmm24
; AVX512F-NEXT: vpermt2q %zmm18, %zmm0, %zmm23
; AVX512F-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm26
; AVX512F-NEXT: vpermt2q %zmm22, %zmm2, %zmm26
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm23
; AVX512F-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm5
; AVX512F-NEXT: vpermt2q %zmm22, %zmm29, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
; AVX512F-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512F-NEXT: vpermt2q %zmm28, %zmm2, %zmm19
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512F-NEXT: vpermt2q %zmm28, %zmm1, %zmm18
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm22
; AVX512F-NEXT: vpermt2q %zmm28, %zmm29, %zmm22
; AVX512F-NEXT: vpermt2q %zmm28, %zmm0, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm15
; AVX512F-NEXT: vpermt2q %zmm6, %zmm2, %zmm15
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm16
; AVX512F-NEXT: vpermt2q %zmm6, %zmm1, %zmm16
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm30[0],zmm6[0],zmm30[2],zmm6[2],zmm30[4],zmm6[4],zmm30[6],zmm6[6]
; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm20 {%k1} = zmm30[1],zmm6[1],zmm30[3],zmm6[3],zmm30[5],zmm6[5],zmm30[7],zmm6[7]
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm5
; AVX512F-NEXT: vpermt2q %zmm6, %zmm29, %zmm5
; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vpermt2q %zmm6, %zmm0, %zmm30
; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm17
; AVX512F-NEXT: vpermt2q %zmm9, %zmm2, %zmm17
; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm5
; AVX512F-NEXT: vpermt2q %zmm9, %zmm1, %zmm5
; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm11
; AVX512F-NEXT: vpermt2q %zmm9, %zmm29, %zmm11
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm7 = zmm21[0],zmm9[0],zmm21[2],zmm9[2],zmm21[4],zmm9[4],zmm21[6],zmm9[6]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm8 = zmm21[1],zmm9[1],zmm21[3],zmm9[3],zmm21[5],zmm9[5],zmm21[7],zmm9[7]
; AVX512F-NEXT: vpermt2q %zmm9, %zmm0, %zmm21
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm10
; AVX512F-NEXT: vpermt2q %zmm4, %zmm2, %zmm10
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm25
; AVX512F-NEXT: vpermt2q %zmm4, %zmm1, %zmm25
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm27 {%k1} = zmm14[0],zmm4[0],zmm14[2],zmm4[2],zmm14[4],zmm4[4],zmm14[6],zmm4[6]
; AVX512F-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm9 {%k1} = zmm14[1],zmm4[1],zmm14[3],zmm4[3],zmm14[5],zmm4[5],zmm14[7],zmm4[7]
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm27
; AVX512F-NEXT: vpermt2q %zmm4, %zmm29, %zmm27
; AVX512F-NEXT: vpermt2q %zmm4, %zmm0, %zmm14
; AVX512F-NEXT: vpermi2q %zmm3, %zmm31, %zmm2
; AVX512F-NEXT: vpermi2q %zmm3, %zmm31, %zmm1
; AVX512F-NEXT: vpermi2q %zmm3, %zmm31, %zmm29
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm31[0],zmm3[0],zmm31[2],zmm3[2],zmm31[4],zmm3[4],zmm31[6],zmm3[6]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm31[1],zmm3[1],zmm31[3],zmm3[3],zmm31[5],zmm3[5],zmm31[7],zmm3[7]
; AVX512F-NEXT: vpermt2q %zmm3, %zmm0, %zmm31
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
; AVX512F-NEXT: vmovdqa (%rsi), %xmm0
; AVX512F-NEXT: vinserti128 $1, (%rcx), %ymm0, %ymm0
; AVX512F-NEXT: vmovdqa (%rdi), %xmm3
; AVX512F-NEXT: vinserti128 $1, (%rdx), %ymm3, %ymm3
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm28 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm28, %zmm13, %zmm13
; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqu64 (%rsp), %zmm13 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm12 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm19 {%k1}
; AVX512F-NEXT: vmovdqa 64(%rsi), %xmm0
; AVX512F-NEXT: vinserti128 $1, 64(%rcx), %ymm0, %ymm0
; AVX512F-NEXT: vmovdqa 64(%rdi), %xmm12
; AVX512F-NEXT: vinserti128 $1, 64(%rdx), %ymm12, %ymm12
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm0[0],ymm12[2],ymm0[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm13, %zmm19, %zmm3
; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm12[1],ymm0[1],ymm12[3],ymm0[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm18, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
; AVX512F-NEXT: vmovdqa 128(%rsi), %xmm0
; AVX512F-NEXT: vinserti128 $1, 128(%rcx), %ymm0, %ymm0
; AVX512F-NEXT: vmovdqa 128(%rdi), %xmm12
; AVX512F-NEXT: vinserti128 $1, 128(%rdx), %ymm12, %ymm13
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm13[0],ymm0[0],ymm13[2],ymm0[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm12, %zmm17, %zmm3
; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm5 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm13[1],ymm0[1],ymm13[3],ymm0[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm7, %zmm28
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm8, %zmm26
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vinsertf64x4 $0, %ymm5, %zmm0, %zmm19
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm20, %zmm18
; AVX512F-NEXT: vmovdqa64 %zmm10, %zmm2 {%k1}
; AVX512F-NEXT: vmovdqa 192(%rsi), %xmm7
; AVX512F-NEXT: vinserti128 $1, 192(%rcx), %ymm7, %ymm7
; AVX512F-NEXT: vmovdqa 192(%rdi), %xmm10
; AVX512F-NEXT: vinserti128 $1, 192(%rdx), %ymm10, %ymm10
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm15 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
; AVX512F-NEXT: vinserti64x4 $0, %ymm15, %zmm2, %zmm17
; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm10[1],ymm7[1],ymm10[3],ymm7[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm7, %zmm1, %zmm1
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm4 {%k1}
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm7 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm7 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm7, %zmm4, %zmm7
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm4 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm4
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm6 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vinsertf64x4 $0, %ymm6, %zmm0, %zmm10
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
; AVX512F-NEXT: # ymm6 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-NEXT: vinserti64x4 $0, %ymm6, %zmm9, %zmm15
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
; AVX512F-NEXT: vmovdqa (%rcx), %ymm6
; AVX512F-NEXT: vmovdqa64 (%rdx), %ymm16
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm16[0],ymm6[0],ymm16[2],ymm6[2]
; AVX512F-NEXT: vmovdqa64 (%rsi), %ymm20
; AVX512F-NEXT: vmovdqa64 (%rdi), %ymm23
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm23[0],ymm20[0],ymm23[2],ymm20[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm3 = ymm12[2,3],ymm3[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm3, %zmm24, %zmm3
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm16[1],ymm6[1],ymm16[3],ymm6[3]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm12 = ymm23[1],ymm20[1],ymm23[3],ymm20[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm6 = ymm12[2,3],ymm6[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm6, %zmm2, %zmm6
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
; AVX512F-NEXT: vmovdqa 64(%rcx), %ymm12
; AVX512F-NEXT: vmovdqa64 64(%rdx), %ymm16
; AVX512F-NEXT: vmovdqa64 64(%rsi), %ymm20
; AVX512F-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm16[0],ymm12[0],ymm16[2],ymm12[2]
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm23[0],ymm20[0],ymm23[2],ymm20[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm13[2,3],ymm0[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm0, %zmm22, %zmm0
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm12 = ymm16[1],ymm12[1],ymm16[3],ymm12[3]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm23[1],ymm20[1],ymm23[3],ymm20[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm12 = ymm13[2,3],ymm12[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
; AVX512F-NEXT: vmovdqa 128(%rcx), %ymm13
; AVX512F-NEXT: vmovdqa64 128(%rdx), %ymm16
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm5 = ymm16[0],ymm13[0],ymm16[2],ymm13[2]
; AVX512F-NEXT: vmovdqa64 128(%rsi), %ymm20
; AVX512F-NEXT: vmovdqa64 128(%rdi), %ymm22
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm22[0],ymm20[0],ymm22[2],ymm20[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm5 = ymm8[2,3],ymm5[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm5, %zmm11, %zmm5
; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm21 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm8 = ymm16[1],ymm13[1],ymm16[3],ymm13[3]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm11 = ymm22[1],ymm20[1],ymm22[3],ymm20[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm8 = ymm11[2,3],ymm8[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm8, %zmm21, %zmm8
; AVX512F-NEXT: vmovdqa64 %zmm27, %zmm29 {%k1}
; AVX512F-NEXT: vmovdqa 192(%rcx), %ymm9
; AVX512F-NEXT: vmovdqa 192(%rdx), %ymm11
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX512F-NEXT: vmovdqa64 192(%rsi), %ymm16
; AVX512F-NEXT: vmovdqa64 192(%rdi), %ymm20
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm20[0],ymm16[0],ymm20[2],ymm16[2]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm2[2,3],ymm13[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm2, %zmm29, %zmm2
; AVX512F-NEXT: vmovdqa64 %zmm14, %zmm31 {%k1}
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm9 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm11 = ymm20[1],ymm16[1],ymm20[3],ymm16[3]
; AVX512F-NEXT: vperm2i128 {{.*#+}} ymm9 = ymm11[2,3],ymm9[2,3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm9, %zmm31, %zmm9
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: vmovdqa64 %zmm9, 1728(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm2, 1664(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm8, 1216(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm5, 1152(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm12, 704(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm0, 640(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm6, 192(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm3, 128(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm15, 1984(%rax)
; AVX512F-NEXT: vmovaps %zmm10, 1920(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm4, 1856(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm7, 1792(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm1, 1600(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm17, 1536(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm18, 1472(%rax)
; AVX512F-NEXT: vmovaps %zmm19, 1408(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm28, 1280(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 1088(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 576(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 512(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512F-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-NEXT: vmovaps %zmm0, (%rax)
; AVX512F-NEXT: addq $2632, %rsp # imm = 0xA48
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride8_vf32:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: subq $2632, %rsp # imm = 0xA48
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-NEXT: vmovaps 128(%rdi), %zmm0
; AVX512BW-NEXT: vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512BW-NEXT: vmovdqa64 128(%rsi), %zmm25
; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm0
; AVX512BW-NEXT: vmovaps 192(%rdx), %zmm2
; AVX512BW-NEXT: vmovups %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 128(%rdx), %zmm13
; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm2
; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm6
; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm21
; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm7
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm19
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm18
; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm28
; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm17
; AVX512BW-NEXT: vmovdqa64 64(%r10), %zmm16
; AVX512BW-NEXT: vmovdqa64 (%rax), %zmm24
; AVX512BW-NEXT: vmovdqa64 64(%rax), %zmm22
; AVX512BW-NEXT: movb $-64, %r11b
; AVX512BW-NEXT: kmovd %r11d, %k1
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [4,12,4,12,4,12,4,12]
; AVX512BW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm15, %zmm9
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm23[0],zmm18[0],zmm23[2],zmm18[2],zmm23[4],zmm18[4],zmm23[6],zmm18[6]
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm15, %zmm9
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [4,12,4,12]
; AVX512BW-NEXT: # ymm12 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm12, %zmm11
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm9, %zmm10, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [5,13,5,13,5,13,5,13]
; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm24, %zmm8, %zmm9
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm23[1],zmm18[1],zmm23[3],zmm18[3],zmm23[5],zmm18[5],zmm23[7],zmm18[7]
; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm8, %zmm9
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [5,13,5,13]
; AVX512BW-NEXT: # ymm14 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm14, %zmm11
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm9, %zmm10, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm18, %zmm3, %zmm9
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm17[0],zmm24[0],zmm17[2],zmm24[2],zmm17[4],zmm24[4],zmm17[6],zmm24[6]
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm3, %zmm10
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,14,6,14]
; AVX512BW-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm4, %zmm11
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm10, %zmm9, %zmm9
; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm9
; AVX512BW-NEXT: vpermt2q %zmm18, %zmm29, %zmm9
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm9 {%k1} = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512BW-NEXT: vpermt2q %zmm0, %zmm29, %zmm5
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [7,15,7,15]
; AVX512BW-NEXT: # ymm0 = mem[0,1,0,1]
; AVX512BW-NEXT: vpermt2q %zmm7, %zmm0, %zmm6
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm9, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm15, %zmm5
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm6 = zmm19[0],zmm28[0],zmm19[2],zmm28[2],zmm19[4],zmm28[4],zmm19[6],zmm28[6]
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm20, %zmm15, %zmm5
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm21, %zmm12, %zmm7
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm22, %zmm8, %zmm5
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm19[1],zmm28[1],zmm19[3],zmm28[3],zmm19[5],zmm28[5],zmm19[7],zmm28[7]
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm20, %zmm8, %zmm5
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm7
; AVX512BW-NEXT: vpermt2q %zmm21, %zmm14, %zmm7
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm5
; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512BW-NEXT: vpermt2q %zmm20, %zmm3, %zmm5
; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512BW-NEXT: vpermt2q %zmm21, %zmm4, %zmm6
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm6
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm3, %zmm6
; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm7
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm16[0],zmm22[0],zmm16[2],zmm22[2],zmm16[4],zmm22[4],zmm16[6],zmm22[6]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm3
; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-NEXT: vmovdqa64 128(%rcx), %zmm5
; AVX512BW-NEXT: vpermt2q %zmm20, %zmm29, %zmm1
; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm3
; AVX512BW-NEXT: vpermt2q %zmm28, %zmm29, %zmm3
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm16[1],zmm22[1],zmm16[3],zmm22[3],zmm16[5],zmm22[5],zmm16[7],zmm22[7]
; AVX512BW-NEXT: vpermt2q %zmm21, %zmm0, %zmm2
; AVX512BW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
4753 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
4754 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4755 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
4756 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm12, %zmm1
4757 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4758 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
4759 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm14, %zmm1
4760 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4761 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm1
4762 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm4, %zmm1
4763 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4764 ; AVX512BW-NEXT: vpermt2q %zmm5, %zmm0, %zmm13
4765 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4766 ; AVX512BW-NEXT: vmovdqa64 192(%rcx), %zmm1
4767 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
4768 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm13, %zmm12
4769 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4770 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm13, %zmm14
4771 ; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4772 ; AVX512BW-NEXT: vpermi2q %zmm1, %zmm13, %zmm4
4773 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4774 ; AVX512BW-NEXT: vpermt2q %zmm1, %zmm0, %zmm13
4775 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4776 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
4777 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
4778 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm15, %zmm0
4779 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4780 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
4781 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm8, %zmm0
4782 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4783 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0
4784 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm7, %zmm0
4785 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4786 ; AVX512BW-NEXT: vpermt2q %zmm25, %zmm29, %zmm1
4787 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4788 ; AVX512BW-NEXT: vmovdqa64 128(%r10), %zmm30
4789 ; AVX512BW-NEXT: vmovdqa64 128(%rax), %zmm6
4790 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
4791 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm15, %zmm0
4792 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4793 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm0
4794 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm8, %zmm0
4795 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4796 ; AVX512BW-NEXT: vmovdqa64 128(%r8), %zmm21
4797 ; AVX512BW-NEXT: vmovdqa64 128(%r9), %zmm9
4798 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm0
4799 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm7, %zmm0
4800 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10
4801 ; AVX512BW-NEXT: vmovdqa64 192(%r10), %zmm14
4802 ; AVX512BW-NEXT: vmovdqa64 192(%rax), %zmm4
4803 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm0
4804 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm15, %zmm0
4805 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4806 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm1
4807 ; AVX512BW-NEXT: vmovdqa64 192(%rsi), %zmm0
4808 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm15
4809 ; AVX512BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4810 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm2
4811 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm8, %zmm2
4812 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4813 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm8
4814 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4815 ; AVX512BW-NEXT: vmovdqa64 192(%r8), %zmm31
4816 ; AVX512BW-NEXT: vmovdqa64 192(%r9), %zmm3
4817 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm27
4818 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm7, %zmm27
4819 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm1, %zmm7
4820 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4821 ; AVX512BW-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
4822 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4823 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm20
4824 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm29, %zmm20
4825 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0
4826 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm29, %zmm0
4827 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4828 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [0,8,0,8,0,8,0,8]
4829 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4830 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm0
4831 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm2, %zmm0
4832 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4833 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [1,9,1,9,1,9,1,9]
4834 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4835 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm0
4836 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm1, %zmm0
4837 ; AVX512BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
4838 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [2,10,2,10,2,10,2,10]
4839 ; AVX512BW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4840 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm0
4841 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm29, %zmm0
4842 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4843 ; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [3,11,3,11,3,11,3,11]
4844 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4845 ; AVX512BW-NEXT: vpermt2q %zmm24, %zmm0, %zmm17
4846 ; AVX512BW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4847 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm13
4848 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm2, %zmm13
4849 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm12
4850 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm1, %zmm12
4851 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm24
4852 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm29, %zmm24
4853 ; AVX512BW-NEXT: vpermt2q %zmm18, %zmm0, %zmm23
4854 ; AVX512BW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4855 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm26
4856 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm2, %zmm26
4857 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm23
4858 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
4859 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm5
4860 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm29, %zmm5
4861 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4862 ; AVX512BW-NEXT: vpermt2q %zmm22, %zmm0, %zmm16
4863 ; AVX512BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4864 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm5
4865 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm2, %zmm19
4866 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm18
4867 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm1, %zmm18
4868 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm22
4869 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm29, %zmm22
4870 ; AVX512BW-NEXT: vpermt2q %zmm28, %zmm0, %zmm5
4871 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4872 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm15
4873 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm2, %zmm15
4874 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm16
4875 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm1, %zmm16
4876 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm30[0],zmm6[0],zmm30[2],zmm6[2],zmm30[4],zmm6[4],zmm30[6],zmm6[6]
4877 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4878 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm20 {%k1} = zmm30[1],zmm6[1],zmm30[3],zmm6[3],zmm30[5],zmm6[5],zmm30[7],zmm6[7]
4879 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm5
4880 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm29, %zmm5
4881 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4882 ; AVX512BW-NEXT: vpermt2q %zmm6, %zmm0, %zmm30
4883 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm17
4884 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm2, %zmm17
4885 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm5
4886 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm1, %zmm5
4887 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm11
4888 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm29, %zmm11
4889 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm7 = zmm21[0],zmm9[0],zmm21[2],zmm9[2],zmm21[4],zmm9[4],zmm21[6],zmm9[6]
4890 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm8 = zmm21[1],zmm9[1],zmm21[3],zmm9[3],zmm21[5],zmm9[5],zmm21[7],zmm9[7]
4891 ; AVX512BW-NEXT: vpermt2q %zmm9, %zmm0, %zmm21
4892 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm10
4893 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm2, %zmm10
4894 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm25
4895 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm1, %zmm25
4896 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm27 {%k1} = zmm14[0],zmm4[0],zmm14[2],zmm4[2],zmm14[4],zmm4[4],zmm14[6],zmm4[6]
4897 ; AVX512BW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4898 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
4899 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm9 {%k1} = zmm14[1],zmm4[1],zmm14[3],zmm4[3],zmm14[5],zmm4[5],zmm14[7],zmm4[7]
4900 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm27
4901 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm29, %zmm27
4902 ; AVX512BW-NEXT: vpermt2q %zmm4, %zmm0, %zmm14
4903 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm31, %zmm2
4904 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm31, %zmm1
4905 ; AVX512BW-NEXT: vpermi2q %zmm3, %zmm31, %zmm29
4906 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm31[0],zmm3[0],zmm31[2],zmm3[2],zmm31[4],zmm3[4],zmm31[6],zmm3[6]
4907 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm31[1],zmm3[1],zmm31[3],zmm3[3],zmm31[5],zmm3[5],zmm31[7],zmm3[7]
4908 ; AVX512BW-NEXT: vpermt2q %zmm3, %zmm0, %zmm31
4909 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4910 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
4911 ; AVX512BW-NEXT: vmovdqa (%rsi), %xmm0
4912 ; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm0, %ymm0
4913 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm3
4914 ; AVX512BW-NEXT: vinserti128 $1, (%rdx), %ymm3, %ymm3
4915 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm28 = ymm3[0],ymm0[0],ymm3[2],ymm0[2]
4916 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm28, %zmm13, %zmm13
4917 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4918 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm13 # 64-byte Reload
4919 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm12 {%k1}
4920 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
4921 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
4922 ; AVX512BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
4923 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm19 {%k1}
4924 ; AVX512BW-NEXT: vmovdqa 64(%rsi), %xmm0
4925 ; AVX512BW-NEXT: vinserti128 $1, 64(%rcx), %ymm0, %ymm0
4926 ; AVX512BW-NEXT: vmovdqa 64(%rdi), %xmm12
4927 ; AVX512BW-NEXT: vinserti128 $1, 64(%rdx), %ymm12, %ymm12
4928 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm0[0],ymm12[2],ymm0[2]
4929 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm13, %zmm19, %zmm3
4930 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4931 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm18 {%k1}
4932 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm12[1],ymm0[1],ymm12[3],ymm0[3]
4933 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm18, %zmm0
4934 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4935 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
4936 ; AVX512BW-NEXT: vmovdqa 128(%rsi), %xmm0
4937 ; AVX512BW-NEXT: vinserti128 $1, 128(%rcx), %ymm0, %ymm0
4938 ; AVX512BW-NEXT: vmovdqa 128(%rdi), %xmm12
4939 ; AVX512BW-NEXT: vinserti128 $1, 128(%rdx), %ymm12, %ymm13
4940 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm13[0],ymm0[0],ymm13[2],ymm0[2]
4941 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm12, %zmm17, %zmm3
4942 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4943 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm5 {%k1}
4944 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm13[1],ymm0[1],ymm13[3],ymm0[3]
4945 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
4946 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4947 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4948 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
4949 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4950 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
4951 ; AVX512BW-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
4952 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm7, %zmm28
4953 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4954 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
4955 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4956 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
4957 ; AVX512BW-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
4958 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm8, %zmm26
4959 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4960 ; AVX512BW-NEXT: vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
4961 ; AVX512BW-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
4962 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4963 ; AVX512BW-NEXT: vinsertf64x4 $0, %ymm5, %zmm0, %zmm19
4964 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4965 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
4966 ; AVX512BW-NEXT: # ymm5 = ymm0[0,1,2,3],mem[4,5,6,7]
4967 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm20, %zmm18
4968 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm2 {%k1}
4969 ; AVX512BW-NEXT: vmovdqa 192(%rsi), %xmm7
4970 ; AVX512BW-NEXT: vinserti128 $1, 192(%rcx), %ymm7, %ymm7
4971 ; AVX512BW-NEXT: vmovdqa 192(%rdi), %xmm10
4972 ; AVX512BW-NEXT: vinserti128 $1, 192(%rdx), %ymm10, %ymm10
4973 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm15 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
4974 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm15, %zmm2, %zmm17
4975 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
4976 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm7 = ymm10[1],ymm7[1],ymm10[3],ymm7[3]
4977 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm7, %zmm1, %zmm1
4978 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4979 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k1}
4980 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4981 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm7 # 32-byte Folded Reload
4982 ; AVX512BW-NEXT: # ymm7 = ymm0[0,1,2,3],mem[4,5,6,7]
4983 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm7, %zmm4, %zmm7
4984 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4985 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
4986 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4987 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 32-byte Folded Reload
4988 ; AVX512BW-NEXT: # ymm4 = ymm0[0,1,2,3],mem[4,5,6,7]
4989 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm4
4990 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4991 ; AVX512BW-NEXT: vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
4992 ; AVX512BW-NEXT: # ymm6 = ymm0[0,1,2,3],mem[4,5,6,7]
4993 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4994 ; AVX512BW-NEXT: vinsertf64x4 $0, %ymm6, %zmm0, %zmm10
4995 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4996 ; AVX512BW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
4997 ; AVX512BW-NEXT: # ymm6 = ymm0[0,1,2,3],mem[4,5,6,7]
4998 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm9, %zmm15
4999 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5000 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
5001 ; AVX512BW-NEXT: vmovdqa (%rcx), %ymm6
5002 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %ymm16
5003 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm16[0],ymm6[0],ymm16[2],ymm6[2]
5004 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %ymm20
5005 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %ymm23
5006 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm23[0],ymm20[0],ymm23[2],ymm20[2]
5007 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm3 = ymm12[2,3],ymm3[2,3]
5008 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm3, %zmm24, %zmm3
5009 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5010 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
5011 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
5012 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm6 = ymm16[1],ymm6[1],ymm16[3],ymm6[3]
5013 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm12 = ymm23[1],ymm20[1],ymm23[3],ymm20[3]
5014 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm6 = ymm12[2,3],ymm6[2,3]
5015 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm6, %zmm2, %zmm6
5016 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5017 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
5018 ; AVX512BW-NEXT: vmovdqa 64(%rcx), %ymm12
5019 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %ymm16
5020 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %ymm20
5021 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %ymm23
5022 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm16[0],ymm12[0],ymm16[2],ymm12[2]
5023 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm23[0],ymm20[0],ymm23[2],ymm20[2]
5024 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm13[2,3],ymm0[2,3]
5025 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm0, %zmm22, %zmm0
5026 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
5027 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
5028 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
5029 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm12 = ymm16[1],ymm12[1],ymm16[3],ymm12[3]
5030 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm23[1],ymm20[1],ymm23[3],ymm20[3]
5031 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm12 = ymm13[2,3],ymm12[2,3]
5032 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
5033 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
5034 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
5035 ; AVX512BW-NEXT: vmovdqa 128(%rcx), %ymm13
5036 ; AVX512BW-NEXT: vmovdqa64 128(%rdx), %ymm16
5037 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm5 = ymm16[0],ymm13[0],ymm16[2],ymm13[2]
5038 ; AVX512BW-NEXT: vmovdqa64 128(%rsi), %ymm20
5039 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %ymm22
5040 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm22[0],ymm20[0],ymm22[2],ymm20[2]
5041 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm5 = ymm8[2,3],ymm5[2,3]
5042 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm5, %zmm11, %zmm5
5043 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm21 {%k1}
5044 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm8 = ymm16[1],ymm13[1],ymm16[3],ymm13[3]
5045 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm11 = ymm22[1],ymm20[1],ymm22[3],ymm20[3]
5046 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm8 = ymm11[2,3],ymm8[2,3]
5047 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm8, %zmm21, %zmm8
5048 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm29 {%k1}
5049 ; AVX512BW-NEXT: vmovdqa 192(%rcx), %ymm9
5050 ; AVX512BW-NEXT: vmovdqa 192(%rdx), %ymm11
5051 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
5052 ; AVX512BW-NEXT: vmovdqa64 192(%rsi), %ymm16
5053 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %ymm20
5054 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm20[0],ymm16[0],ymm20[2],ymm16[2]
5055 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm2[2,3],ymm13[2,3]
5056 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm29, %zmm2
5057 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm31 {%k1}
5058 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm9 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
5059 ; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm11 = ymm20[1],ymm16[1],ymm20[3],ymm16[3]
5060 ; AVX512BW-NEXT: vperm2i128 {{.*#+}} ymm9 = ymm11[2,3],ymm9[2,3]
5061 ; AVX512BW-NEXT: vinserti64x4 $0, %ymm9, %zmm31, %zmm9
5062 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5063 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
5064 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 1664(%rax)
5065 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 1216(%rax)
5066 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 1152(%rax)
5067 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 704(%rax)
5068 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 640(%rax)
5069 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 192(%rax)
5070 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 128(%rax)
5071 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 1984(%rax)
5072 ; AVX512BW-NEXT: vmovaps %zmm10, 1920(%rax)
5073 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 1856(%rax)
5074 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 1792(%rax)
5075 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 1600(%rax)
5076 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 1536(%rax)
5077 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 1472(%rax)
5078 ; AVX512BW-NEXT: vmovaps %zmm19, 1408(%rax)
5079 ; AVX512BW-NEXT: vmovdqa64 %zmm26, 1344(%rax)
5080 ; AVX512BW-NEXT: vmovdqa64 %zmm28, 1280(%rax)
5081 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5082 ; AVX512BW-NEXT: vmovaps %zmm0, 1088(%rax)
5083 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5084 ; AVX512BW-NEXT: vmovaps %zmm0, 1024(%rax)
5085 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5086 ; AVX512BW-NEXT: vmovaps %zmm0, 960(%rax)
5087 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5088 ; AVX512BW-NEXT: vmovaps %zmm0, 896(%rax)
5089 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5090 ; AVX512BW-NEXT: vmovaps %zmm0, 832(%rax)
5091 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5092 ; AVX512BW-NEXT: vmovaps %zmm0, 768(%rax)
5093 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5094 ; AVX512BW-NEXT: vmovaps %zmm0, 576(%rax)
5095 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5096 ; AVX512BW-NEXT: vmovaps %zmm0, 512(%rax)
5097 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5098 ; AVX512BW-NEXT: vmovaps %zmm0, 448(%rax)
5099 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5100 ; AVX512BW-NEXT: vmovaps %zmm0, 384(%rax)
5101 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5102 ; AVX512BW-NEXT: vmovaps %zmm0, 320(%rax)
5103 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5104 ; AVX512BW-NEXT: vmovaps %zmm0, 256(%rax)
5105 ; AVX512BW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
5106 ; AVX512BW-NEXT: vmovaps %zmm0, 64(%rax)
5107 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5108 ; AVX512BW-NEXT: vmovaps %zmm0, (%rax)
5109 ; AVX512BW-NEXT: addq $2632, %rsp # imm = 0xA48
5110 ; AVX512BW-NEXT: vzeroupper
5111 ; AVX512BW-NEXT: retq
  %in.vec0 = load <32 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <32 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <32 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <32 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <32 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <32 x i64>, ptr %in.vecptr5, align 64
  %in.vec6 = load <32 x i64>, ptr %in.vecptr6, align 64
  %in.vec7 = load <32 x i64>, ptr %in.vecptr7, align 64
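  ; The shuffles below first concatenate the eight <32 x i64> inputs pairwise
  ; (%1..%4), then pairwise again (%5, %6), and finally into one <256 x i64>
  ; value (%7) that feeds the interleaving shuffle.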
  %1 = shufflevector <32 x i64> %in.vec0, <32 x i64> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %2 = shufflevector <32 x i64> %in.vec2, <32 x i64> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %3 = shufflevector <32 x i64> %in.vec4, <32 x i64> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %4 = shufflevector <32 x i64> %in.vec6, <32 x i64> %in.vec7, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %5 = shufflevector <64 x i64> %1, <64 x i64> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %6 = shufflevector <64 x i64> %3, <64 x i64> %4, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %7 = shufflevector <128 x i64> %5, <128 x i64> %6, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
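  ; The mask below performs the stride-8 interleave: result element 8*j+k is
  ; element 32*k+j of %7 (lane j of source k), so each group of eight
  ; consecutive results holds lane j of all eight sources; e.g. result[9]
  ; takes %7[33].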
  %interleaved.vec = shufflevector <256 x i64> %7, <256 x i64> poison, <256 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 224, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 225, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 226, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 227, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 228, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 229, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 230, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 231, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 232, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 233, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 234, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 235, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 236, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 237, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 238, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 239, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 240, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 241, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 242, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 243, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 244, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 245, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 246, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 247, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 248, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 249, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 250, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 251, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 252, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 253, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 254, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223, i32 255>
  store <256 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride8_vf64:
; SSE: # %bb.0:
; SSE-NEXT: subq $3736, %rsp # imm = 0xE98
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps 16(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm9
; SSE-NEXT: movaps 16(%rdx), %xmm10
; SSE-NEXT: movaps (%rcx), %xmm3
; SSE-NEXT: movaps 16(%rcx), %xmm2
; SSE-NEXT: movaps 16(%r8), %xmm12
; SSE-NEXT: movaps (%r8), %xmm11
; SSE-NEXT: movaps 16(%r9), %xmm4
; SSE-NEXT: movaps (%r9), %xmm5
; SSE-NEXT: movaps 16(%r10), %xmm14
; SSE-NEXT: movaps (%r10), %xmm13
; SSE-NEXT: movaps (%rax), %xmm6
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm1[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm1[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm3[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm5[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm6[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm2[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm12, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm4[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 16(%rax), %xmm0
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rdi), %xmm2
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rdx), %xmm2
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r10), %xmm2
; SSE-NEXT: movaps 32(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r10), %xmm2
; SSE-NEXT: movaps 48(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r10), %xmm2
; SSE-NEXT: movaps 64(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r10), %xmm2
; SSE-NEXT: movaps 80(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r10), %xmm2
; SSE-NEXT: movaps 96(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdi), %xmm2
; SSE-NEXT: movaps 112(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdx), %xmm2
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r8), %xmm2
; SSE-NEXT: movaps 112(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r10), %xmm2
; SSE-NEXT: movaps 112(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdi), %xmm2
; SSE-NEXT: movaps 128(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdx), %xmm2
; SSE-NEXT: movaps 128(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r8), %xmm2
; SSE-NEXT: movaps 128(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r10), %xmm2
; SSE-NEXT: movaps 128(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdi), %xmm2
; SSE-NEXT: movaps 144(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdx), %xmm2
; SSE-NEXT: movaps 144(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r8), %xmm2
; SSE-NEXT: movaps 144(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r10), %xmm2
; SSE-NEXT: movaps 144(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdi), %xmm2
; SSE-NEXT: movaps 160(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdx), %xmm2
; SSE-NEXT: movaps 160(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r8), %xmm2
; SSE-NEXT: movaps 160(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r10), %xmm2
; SSE-NEXT: movaps 160(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdi), %xmm2
; SSE-NEXT: movaps 176(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdx), %xmm2
; SSE-NEXT: movaps 176(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r8), %xmm2
; SSE-NEXT: movaps 176(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r10), %xmm2
; SSE-NEXT: movaps 176(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdi), %xmm2
; SSE-NEXT: movaps 192(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdx), %xmm2
; SSE-NEXT: movaps 192(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r8), %xmm2
; SSE-NEXT: movaps 192(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r10), %xmm2
; SSE-NEXT: movaps 192(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdi), %xmm2
; SSE-NEXT: movaps 208(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdx), %xmm2
; SSE-NEXT: movaps 208(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%r8), %xmm2
; SSE-NEXT: movaps 208(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%r10), %xmm2
; SSE-NEXT: movaps 208(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 224(%rdi), %xmm2
; SSE-NEXT: movaps 224(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 224(%rdx), %xmm2
; SSE-NEXT: movaps 224(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 224(%r8), %xmm2
; SSE-NEXT: movaps 224(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 224(%r10), %xmm2
; SSE-NEXT: movaps 224(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 240(%rdi), %xmm2
; SSE-NEXT: movaps 240(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 240(%rdx), %xmm2
; SSE-NEXT: movaps 240(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 240(%r8), %xmm2
; SSE-NEXT: movaps 240(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 240(%r10), %xmm2
; SSE-NEXT: movaps 240(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 256(%rdi), %xmm2
5587 ; SSE-NEXT: movaps 256(%rsi), %xmm0
5588 ; SSE-NEXT: movaps %xmm2, %xmm1
5589 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5590 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5591 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5592 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5593 ; SSE-NEXT: movaps 256(%rdx), %xmm2
5594 ; SSE-NEXT: movaps 256(%rcx), %xmm0
5595 ; SSE-NEXT: movaps %xmm2, %xmm1
5596 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5597 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5598 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5599 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5600 ; SSE-NEXT: movaps 256(%r8), %xmm2
5601 ; SSE-NEXT: movaps 256(%r9), %xmm0
5602 ; SSE-NEXT: movaps %xmm2, %xmm1
5603 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5604 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5605 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5606 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5607 ; SSE-NEXT: movaps 256(%r10), %xmm2
5608 ; SSE-NEXT: movaps 256(%rax), %xmm0
5609 ; SSE-NEXT: movaps %xmm2, %xmm1
5610 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5611 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5612 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5613 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5614 ; SSE-NEXT: movaps 272(%rdi), %xmm2
5615 ; SSE-NEXT: movaps 272(%rsi), %xmm0
5616 ; SSE-NEXT: movaps %xmm2, %xmm1
5617 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5618 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5619 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5620 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5621 ; SSE-NEXT: movaps 272(%rdx), %xmm2
5622 ; SSE-NEXT: movaps 272(%rcx), %xmm0
5623 ; SSE-NEXT: movaps %xmm2, %xmm1
5624 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5625 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5626 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5627 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5628 ; SSE-NEXT: movaps 272(%r8), %xmm2
5629 ; SSE-NEXT: movaps 272(%r9), %xmm0
5630 ; SSE-NEXT: movaps %xmm2, %xmm1
5631 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5632 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5633 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5634 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5635 ; SSE-NEXT: movaps 272(%r10), %xmm2
5636 ; SSE-NEXT: movaps 272(%rax), %xmm0
5637 ; SSE-NEXT: movaps %xmm2, %xmm1
5638 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5639 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5640 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5641 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5642 ; SSE-NEXT: movaps 288(%rdi), %xmm2
5643 ; SSE-NEXT: movaps 288(%rsi), %xmm0
5644 ; SSE-NEXT: movaps %xmm2, %xmm1
5645 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5646 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5647 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5648 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5649 ; SSE-NEXT: movaps 288(%rdx), %xmm2
5650 ; SSE-NEXT: movaps 288(%rcx), %xmm0
5651 ; SSE-NEXT: movaps %xmm2, %xmm1
5652 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
5653 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5654 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5655 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5656 ; SSE-NEXT: movaps 288(%r8), %xmm2
; SSE-NEXT: movaps 288(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 288(%r10), %xmm2
; SSE-NEXT: movaps 288(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 304(%rdi), %xmm2
; SSE-NEXT: movaps 304(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 304(%rdx), %xmm2
; SSE-NEXT: movaps 304(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 304(%r8), %xmm2
; SSE-NEXT: movaps 304(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 304(%r10), %xmm2
; SSE-NEXT: movaps 304(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 320(%rdi), %xmm2
; SSE-NEXT: movaps 320(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 320(%rdx), %xmm2
; SSE-NEXT: movaps 320(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 320(%r8), %xmm2
; SSE-NEXT: movaps 320(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 320(%r10), %xmm2
; SSE-NEXT: movaps 320(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 336(%rdi), %xmm2
; SSE-NEXT: movaps 336(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 336(%rdx), %xmm2
; SSE-NEXT: movaps 336(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 336(%r8), %xmm2
; SSE-NEXT: movaps 336(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 336(%r10), %xmm2
; SSE-NEXT: movaps 336(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 352(%rdi), %xmm2
; SSE-NEXT: movaps 352(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 352(%rdx), %xmm2
; SSE-NEXT: movaps 352(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 352(%r8), %xmm2
; SSE-NEXT: movaps 352(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 352(%r10), %xmm2
; SSE-NEXT: movaps 352(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 368(%rdi), %xmm2
; SSE-NEXT: movaps 368(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 368(%rdx), %xmm2
; SSE-NEXT: movaps 368(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 368(%r8), %xmm2
; SSE-NEXT: movaps 368(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 368(%r10), %xmm2
; SSE-NEXT: movaps 368(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 384(%rdi), %xmm2
; SSE-NEXT: movaps 384(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 384(%rdx), %xmm2
; SSE-NEXT: movaps 384(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 384(%r8), %xmm2
; SSE-NEXT: movaps 384(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 384(%r10), %xmm2
; SSE-NEXT: movaps 384(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 400(%rdi), %xmm2
; SSE-NEXT: movaps 400(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 400(%rdx), %xmm2
; SSE-NEXT: movaps 400(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 400(%r8), %xmm2
; SSE-NEXT: movaps 400(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 400(%r10), %xmm2
; SSE-NEXT: movaps 400(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 416(%rdi), %xmm2
; SSE-NEXT: movaps 416(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 416(%rdx), %xmm2
; SSE-NEXT: movaps 416(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 416(%r8), %xmm2
; SSE-NEXT: movaps 416(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 416(%r10), %xmm2
; SSE-NEXT: movaps 416(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 432(%rdi), %xmm2
; SSE-NEXT: movaps 432(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 432(%rdx), %xmm2
; SSE-NEXT: movaps 432(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 432(%r8), %xmm2
; SSE-NEXT: movaps 432(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 432(%r10), %xmm2
; SSE-NEXT: movaps 432(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 448(%rdi), %xmm2
; SSE-NEXT: movaps 448(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 448(%rdx), %xmm2
; SSE-NEXT: movaps 448(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 448(%r8), %xmm2
; SSE-NEXT: movaps 448(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 448(%r10), %xmm2
; SSE-NEXT: movaps 448(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 464(%rdi), %xmm2
; SSE-NEXT: movaps 464(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 464(%rdx), %xmm2
; SSE-NEXT: movaps 464(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 464(%r8), %xmm2
; SSE-NEXT: movaps 464(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 464(%r10), %xmm2
; SSE-NEXT: movaps 464(%rax), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 480(%rdi), %xmm13
; SSE-NEXT: movaps 480(%rsi), %xmm0
; SSE-NEXT: movaps %xmm13, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
; SSE-NEXT: movaps 480(%rdx), %xmm10
; SSE-NEXT: movaps 480(%rcx), %xmm0
; SSE-NEXT: movaps %xmm10, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
; SSE-NEXT: movaps 480(%r8), %xmm9
; SSE-NEXT: movaps 480(%r9), %xmm0
; SSE-NEXT: movaps %xmm9, %xmm14
; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps 480(%r10), %xmm11
; SSE-NEXT: movaps 480(%rax), %xmm1
; SSE-NEXT: movaps %xmm11, %xmm12
; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm1[1]
; SSE-NEXT: movaps 496(%rdi), %xmm7
; SSE-NEXT: movaps 496(%rsi), %xmm0
; SSE-NEXT: movaps %xmm7, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm0[1]
; SSE-NEXT: movaps 496(%rdx), %xmm5
; SSE-NEXT: movaps 496(%rcx), %xmm1
; SSE-NEXT: movaps %xmm5, %xmm6
; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm1[1]
; SSE-NEXT: movaps 496(%r8), %xmm1
; SSE-NEXT: movaps 496(%r9), %xmm2
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; SSE-NEXT: movaps 496(%r10), %xmm2
; SSE-NEXT: movaps 496(%rax), %xmm3
; SSE-NEXT: movaps %xmm2, %xmm0
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; SSE-NEXT: movaps %xmm2, 4080(%rcx)
; SSE-NEXT: movaps %xmm1, 4064(%rcx)
; SSE-NEXT: movaps %xmm5, 4048(%rcx)
; SSE-NEXT: movaps %xmm7, 4032(%rcx)
; SSE-NEXT: movaps %xmm0, 4016(%rcx)
; SSE-NEXT: movaps %xmm4, 4000(%rcx)
; SSE-NEXT: movaps %xmm6, 3984(%rcx)
; SSE-NEXT: movaps %xmm8, 3968(%rcx)
; SSE-NEXT: movaps %xmm11, 3952(%rcx)
; SSE-NEXT: movaps %xmm9, 3936(%rcx)
; SSE-NEXT: movaps %xmm10, 3920(%rcx)
; SSE-NEXT: movaps %xmm13, 3904(%rcx)
; SSE-NEXT: movaps %xmm12, 3888(%rcx)
; SSE-NEXT: movaps %xmm14, 3872(%rcx)
; SSE-NEXT: movaps %xmm15, 3856(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3840(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3824(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3808(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3792(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3776(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3760(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3744(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3728(%rcx)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3712(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3696(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3680(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3664(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3648(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3632(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3616(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3600(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3584(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3568(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3552(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3536(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3520(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3504(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3488(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3472(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3456(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3440(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3424(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3408(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3392(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3376(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3360(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3344(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3328(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3312(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3296(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3280(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3264(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3248(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3232(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3216(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3200(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3184(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3168(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3152(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3136(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3120(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3104(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3088(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3072(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3056(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3040(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3024(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 3008(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2992(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2976(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2960(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2944(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2928(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2912(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2896(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2880(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2864(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2848(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2832(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2816(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2800(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2784(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2768(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2752(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2736(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2720(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2704(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2688(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2672(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2656(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2640(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2624(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2608(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2592(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2576(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2560(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2544(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2528(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2512(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2496(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2480(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2464(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2448(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2432(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2416(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2400(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2384(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2368(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2352(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2336(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2320(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2304(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2288(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2272(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2256(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2240(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2224(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2208(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2192(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2176(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2160(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2144(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2128(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2112(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2096(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2080(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2064(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2048(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2032(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2016(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 2000(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1984(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1968(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1952(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1936(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1920(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1904(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1888(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1872(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1856(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1840(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1824(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1808(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1792(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1776(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1760(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1744(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1728(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1712(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1696(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1680(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1664(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1648(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1632(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1616(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1600(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1584(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1568(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1552(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1536(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1520(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1504(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1488(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1472(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1456(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1440(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1424(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1408(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1392(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1376(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1360(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1344(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1328(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1312(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1296(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1280(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1264(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1248(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1232(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1216(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1200(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1184(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1168(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1152(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1136(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1120(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1104(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1088(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1072(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1056(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1040(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1024(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 1008(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 992(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 976(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 960(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 944(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 928(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 912(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 896(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 880(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 864(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 848(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 832(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 816(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 800(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 784(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 768(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 752(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 736(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 720(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 704(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 688(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 672(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 656(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 640(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 624(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 608(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 592(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 576(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 560(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 544(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 528(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 512(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 496(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 480(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 464(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 448(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 432(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 416(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 400(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 384(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 368(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 352(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 336(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 320(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 304(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 288(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 272(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 224(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 208(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 192(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 176(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 128(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 112(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 96(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 32(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rcx)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rcx)
; SSE-NEXT: addq $3736, %rsp # imm = 0xE98
; SSE-NEXT: retq
;
6520 ; AVX1-ONLY-LABEL: store_i64_stride8_vf64:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: subq $3784, %rsp # imm = 0xEC8
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm6
; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm0
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm2
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2],ymm2[2]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm7
; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm1
; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm8
; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm8[0],xmm7[0]
; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm9
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm3, %ymm10
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm10[1],ymm3[2],ymm10[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm3
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm5
; AVX1-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2,3],ymm4[4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm8[1],xmm7[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm4, %ymm5
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2,3],ymm4[4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm5[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1],ymm0[2,3],ymm4[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 128(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 128(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 136(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 136(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 160(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 160(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 168(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 168(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 192(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 192(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 200(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 200(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 224(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 224(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 232(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 232(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 256(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 256(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 256(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 256(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 256(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 256(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 256(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 256(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 264(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 264(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 288(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 288(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 288(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 288(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 288(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 288(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 288(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 288(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 296(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 296(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 320(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 320(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovaps 320(%rcx), %xmm3
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 320(%rdx), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 320(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 320(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm4[0],xmm2[0]
; AVX1-ONLY-NEXT: vmovaps 320(%rax), %xmm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 320(%r10), %ymm5, %ymm5
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 328(%rdx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 328(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 352(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 352(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 352(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 352(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 352(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 352(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 352(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 352(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 360(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 360(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 384(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 384(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 384(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 384(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 384(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 384(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 384(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 384(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 392(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 392(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 416(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 416(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 416(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 416(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 416(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 416(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 416(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 416(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 424(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 424(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 448(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 448(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 448(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 448(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 448(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 448(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 448(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 448(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 456(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 456(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 480(%rcx), %xmm0
; AVX1-ONLY-NEXT: vmovaps 480(%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps 480(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm4
; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%rdx), %ymm3, %ymm3
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 480(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 480(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps 480(%rax), %xmm5
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm7
; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%r10), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[2]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX1-ONLY-NEXT: vbroadcastsd 488(%rdx), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vbroadcastsd 488(%r10), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 16(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 16(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 48(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 48(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 80(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 80(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 112(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 112(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 112(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 144(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 144(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 144(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 144(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 152(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 152(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 176(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 176(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 176(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 176(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 184(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 184(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 208(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 208(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 208(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 208(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 216(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 216(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 240(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 240(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 240(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 240(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 240(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 248(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 248(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 272(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 272(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 272(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 272(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 272(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 272(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 280(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 280(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 304(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 304(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 304(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 304(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 304(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 304(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 312(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 312(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 336(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 336(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 336(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 336(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 336(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 336(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 344(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 344(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 368(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 368(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 368(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 368(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 368(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 368(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 376(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 376(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 400(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 400(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 400(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 400(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 400(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 400(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 408(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 408(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 432(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 432(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 432(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 432(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 432(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 432(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 440(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 440(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 464(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 464(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 464(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 464(%r9), %xmm2
; AVX1-ONLY-NEXT: vmovaps 464(%r8), %xmm3
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 464(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 472(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 472(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovaps 496(%rsi), %xmm0
; AVX1-ONLY-NEXT: vmovaps 496(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 496(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovaps 496(%r9), %xmm3
; AVX1-ONLY-NEXT: vmovaps 496(%r8), %xmm4
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm4[0],xmm3[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 496(%rax), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm2[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm0[1]
; AVX1-ONLY-NEXT: vbroadcastsd 504(%rdx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm3[1]
; AVX1-ONLY-NEXT: vbroadcastsd 504(%r10), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
; AVX1-ONLY-NEXT: vmovaps %ymm3, 4064(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm5, 4032(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm8, 4000(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm11, 3968(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3936(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3904(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3872(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3840(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm12, 3808(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm13, 3776(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3744(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3712(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3680(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3648(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3616(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 3584(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3552(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm1, 3520(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3488(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3456(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3424(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3392(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3360(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3328(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm2, 3296(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm4, 3264(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3232(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3200(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3168(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3136(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3104(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 3072(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm6, 3040(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm7, 3008(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2976(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2944(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2912(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2880(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2848(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2816(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm9, 2784(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm10, 2752(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2720(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2688(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2656(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2624(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2592(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2560(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm14, 2528(%rdx)
; AVX1-ONLY-NEXT: vmovaps %ymm15, 2496(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2464(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2432(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2400(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2368(%rdx)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7479 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2336(%rdx)
7480 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7481 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2304(%rdx)
7482 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7483 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2272(%rdx)
7484 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7485 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2240(%rdx)
7486 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7487 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2208(%rdx)
7488 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7489 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2176(%rdx)
7490 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7491 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2144(%rdx)
7492 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7493 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2112(%rdx)
7494 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7495 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2080(%rdx)
7496 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7497 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2048(%rdx)
7498 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7499 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2016(%rdx)
7500 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7501 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1984(%rdx)
7502 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7503 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1952(%rdx)
7504 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7505 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1920(%rdx)
7506 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7507 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1888(%rdx)
7508 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7509 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1856(%rdx)
7510 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7511 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1824(%rdx)
7512 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7513 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1792(%rdx)
7514 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
7515 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1760(%rdx)
7516 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7517 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1728(%rdx)
7518 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7519 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1696(%rdx)
7520 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7521 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1664(%rdx)
7522 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7523 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1632(%rdx)
7524 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7525 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1600(%rdx)
7526 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7527 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1568(%rdx)
7528 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7529 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1536(%rdx)
7530 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7531 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rdx)
7532 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7533 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rdx)
7534 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7535 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rdx)
7536 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7537 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rdx)
7538 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7539 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rdx)
7540 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7541 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1344(%rdx)
7542 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7543 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rdx)
7544 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7545 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rdx)
7546 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7547 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rdx)
7548 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7549 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rdx)
7550 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7551 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rdx)
7552 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7553 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1152(%rdx)
7554 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7555 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rdx)
7556 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7557 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rdx)
7558 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7559 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rdx)
7560 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7561 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rdx)
7562 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7563 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rdx)
7564 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7565 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 960(%rdx)
7566 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7567 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rdx)
7568 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7569 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rdx)
7570 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7571 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
7572 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7573 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
7574 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7575 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
7576 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7577 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
7578 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7579 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rdx)
7580 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7581 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rdx)
7582 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7583 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rdx)
7584 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7585 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rdx)
7586 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7587 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
7588 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7589 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
7590 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7591 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
7592 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7593 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
7594 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7595 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rdx)
7596 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7597 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rdx)
7598 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7599 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rdx)
7600 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7601 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
7602 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7603 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
7604 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7605 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
7606 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7607 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
7608 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7609 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
7610 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7611 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rdx)
7612 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7613 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rdx)
7614 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7615 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
7616 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7617 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
7618 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7619 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
7620 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7621 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
7622 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7623 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
7624 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7625 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rdx)
7626 ; AVX1-ONLY-NEXT: addq $3784, %rsp # imm = 0xEC8
7627 ; AVX1-ONLY-NEXT: vzeroupper
7628 ; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride8_vf64:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: subq $3880, %rsp # imm = 0xF28
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm3
; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm4
; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%rdx), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r10), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm3[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%rdx), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 72(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 72(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 104(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 104(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 136(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 136(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 128(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 168(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 168(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 160(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 200(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 200(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 192(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 232(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 232(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 224(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 264(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 256(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 264(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 256(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 296(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 288(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 296(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 288(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 328(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 320(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 328(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 320(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 360(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 352(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 360(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 352(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 392(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 384(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%r8), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 392(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 384(%rax), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 424(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 416(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%r8), %xmm13
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm0[1]
; AVX2-ONLY-NEXT: vbroadcastsd 424(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 416(%rax), %xmm12
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %xmm11
; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %xmm10
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm10[1],xmm11[1]
; AVX2-ONLY-NEXT: vbroadcastsd 456(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 448(%rcx), %xmm9
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 448(%r9), %xmm8
; AVX2-ONLY-NEXT: vmovaps 448(%r8), %xmm7
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm8[1]
; AVX2-ONLY-NEXT: vbroadcastsd 456(%r10), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 448(%rax), %xmm6
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 480(%rsi), %xmm5
; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %xmm4
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm5[1]
; AVX2-ONLY-NEXT: vbroadcastsd 488(%rdx), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 480(%rcx), %xmm3
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 480(%r9), %xmm2
; AVX2-ONLY-NEXT: vmovaps 480(%r8), %xmm1
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm1[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 488(%r10), %ymm15
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 480(%rax), %xmm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm15[0,1,2,3,4,5],ymm14[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 32(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 160(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 160(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 224(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 224(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 256(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 256(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd (%rsp), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 288(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 288(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 320(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 320(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 352(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 352(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 384(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 384(%r10), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 416(%rdx), %ymm14, %ymm14
; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: # xmm13 = xmm13[0],mem[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 416(%r10), %ymm13, %ymm13
; AVX2-ONLY-NEXT: vbroadcastsd %xmm12, %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm10 = xmm10[0],xmm11[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 448(%rdx), %ymm10, %ymm10
; AVX2-ONLY-NEXT: vbroadcastsd %xmm9, %ymm9
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm7[0],xmm8[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 448(%r10), %ymm7, %ymm7
; AVX2-ONLY-NEXT: vbroadcastsd %xmm6, %ymm6
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 480(%rdx), %ymm4, %ymm4
; AVX2-ONLY-NEXT: vbroadcastsd %xmm3, %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
; AVX2-ONLY-NEXT: vinsertf128 $1, 480(%r10), %ymm1, %ymm1
; AVX2-ONLY-NEXT: vbroadcastsd %xmm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%r10), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovaps 128(%r9), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%rax), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vbroadcastsd 152(%rdx), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 152(%r10), %ymm1
8321 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8322 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8323 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
8324 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
8325 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8326 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8327 ; AVX2-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm3
8328 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8329 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8330 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm2
8331 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %ymm3
8332 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8333 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8334 ; AVX2-ONLY-NEXT: vbroadcastsd 176(%rax), %ymm5
8335 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8336 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8337 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8338 ; AVX2-ONLY-NEXT: vbroadcastsd 184(%rdx), %ymm1
8339 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8340 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8341 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8342 ; AVX2-ONLY-NEXT: vbroadcastsd 184(%r10), %ymm1
8343 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8344 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8345 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm0
8346 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm1
8347 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8348 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8349 ; AVX2-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm3
8350 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8351 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8352 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm2
8353 ; AVX2-ONLY-NEXT: vmovaps 192(%r9), %ymm3
8354 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8355 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8356 ; AVX2-ONLY-NEXT: vbroadcastsd 208(%rax), %ymm5
8357 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8358 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8359 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8360 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%rdx), %ymm1
8361 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8362 ; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
8363 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8364 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%r10), %ymm1
8365 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8366 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8367 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
8368 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm1
8369 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8370 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8371 ; AVX2-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm3
8372 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8373 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8374 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm2
8375 ; AVX2-ONLY-NEXT: vmovaps 224(%r9), %ymm3
8376 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8377 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8378 ; AVX2-ONLY-NEXT: vbroadcastsd 240(%rax), %ymm5
8379 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8380 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8381 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8382 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%rdx), %ymm1
8383 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8384 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8385 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8386 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%r10), %ymm1
8387 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8388 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8389 ; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %ymm0
8390 ; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %ymm1
8391 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8392 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8393 ; AVX2-ONLY-NEXT: vbroadcastsd 272(%rcx), %ymm3
8394 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8395 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8396 ; AVX2-ONLY-NEXT: vmovaps 256(%r8), %ymm2
8397 ; AVX2-ONLY-NEXT: vmovaps 256(%r9), %ymm3
8398 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8399 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8400 ; AVX2-ONLY-NEXT: vbroadcastsd 272(%rax), %ymm5
8401 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8402 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8403 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8404 ; AVX2-ONLY-NEXT: vbroadcastsd 280(%rdx), %ymm1
8405 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8406 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8407 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8408 ; AVX2-ONLY-NEXT: vbroadcastsd 280(%r10), %ymm1
8409 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8410 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8411 ; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %ymm0
8412 ; AVX2-ONLY-NEXT: vmovaps 288(%rsi), %ymm1
8413 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8414 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8415 ; AVX2-ONLY-NEXT: vbroadcastsd 304(%rcx), %ymm3
8416 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8417 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8418 ; AVX2-ONLY-NEXT: vmovaps 288(%r8), %ymm2
8419 ; AVX2-ONLY-NEXT: vmovaps 288(%r9), %ymm3
8420 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8421 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8422 ; AVX2-ONLY-NEXT: vbroadcastsd 304(%rax), %ymm5
8423 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8424 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8425 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8426 ; AVX2-ONLY-NEXT: vbroadcastsd 312(%rdx), %ymm1
8427 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8428 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8429 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8430 ; AVX2-ONLY-NEXT: vbroadcastsd 312(%r10), %ymm1
8431 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8432 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8433 ; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %ymm0
8434 ; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %ymm1
8435 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8436 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8437 ; AVX2-ONLY-NEXT: vbroadcastsd 336(%rcx), %ymm3
8438 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8439 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8440 ; AVX2-ONLY-NEXT: vmovaps 320(%r8), %ymm2
8441 ; AVX2-ONLY-NEXT: vmovaps 320(%r9), %ymm3
8442 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8443 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8444 ; AVX2-ONLY-NEXT: vbroadcastsd 336(%rax), %ymm5
8445 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8446 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8447 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8448 ; AVX2-ONLY-NEXT: vbroadcastsd 344(%rdx), %ymm1
8449 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8450 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8451 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8452 ; AVX2-ONLY-NEXT: vbroadcastsd 344(%r10), %ymm1
8453 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8454 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8455 ; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %ymm0
8456 ; AVX2-ONLY-NEXT: vmovaps 352(%rsi), %ymm1
8457 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8458 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8459 ; AVX2-ONLY-NEXT: vbroadcastsd 368(%rcx), %ymm3
8460 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8461 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8462 ; AVX2-ONLY-NEXT: vmovaps 352(%r8), %ymm2
8463 ; AVX2-ONLY-NEXT: vmovaps 352(%r9), %ymm3
8464 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8465 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8466 ; AVX2-ONLY-NEXT: vbroadcastsd 368(%rax), %ymm5
8467 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8468 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8469 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8470 ; AVX2-ONLY-NEXT: vbroadcastsd 376(%rdx), %ymm1
8471 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8472 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8473 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8474 ; AVX2-ONLY-NEXT: vbroadcastsd 376(%r10), %ymm1
8475 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8476 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8477 ; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %ymm0
8478 ; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %ymm1
8479 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8480 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8481 ; AVX2-ONLY-NEXT: vbroadcastsd 400(%rcx), %ymm3
8482 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8483 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8484 ; AVX2-ONLY-NEXT: vmovaps 384(%r8), %ymm2
8485 ; AVX2-ONLY-NEXT: vmovaps 384(%r9), %ymm3
8486 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8487 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8488 ; AVX2-ONLY-NEXT: vbroadcastsd 400(%rax), %ymm5
8489 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8490 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8491 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8492 ; AVX2-ONLY-NEXT: vbroadcastsd 408(%rdx), %ymm1
8493 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8494 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8495 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8496 ; AVX2-ONLY-NEXT: vbroadcastsd 408(%r10), %ymm1
8497 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8498 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8499 ; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %ymm0
8500 ; AVX2-ONLY-NEXT: vmovaps 416(%rsi), %ymm1
8501 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8502 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8503 ; AVX2-ONLY-NEXT: vbroadcastsd 432(%rcx), %ymm3
8504 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8505 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8506 ; AVX2-ONLY-NEXT: vmovaps 416(%r8), %ymm2
8507 ; AVX2-ONLY-NEXT: vmovaps 416(%r9), %ymm3
8508 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8509 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8510 ; AVX2-ONLY-NEXT: vbroadcastsd 432(%rax), %ymm5
8511 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8512 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8513 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8514 ; AVX2-ONLY-NEXT: vbroadcastsd 440(%rdx), %ymm1
8515 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8516 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8517 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8518 ; AVX2-ONLY-NEXT: vbroadcastsd 440(%r10), %ymm1
8519 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8520 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8521 ; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %ymm0
8522 ; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %ymm1
8523 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8524 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8525 ; AVX2-ONLY-NEXT: vbroadcastsd 464(%rcx), %ymm3
8526 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8527 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8528 ; AVX2-ONLY-NEXT: vmovaps 448(%r8), %ymm2
8529 ; AVX2-ONLY-NEXT: vmovaps 448(%r9), %ymm3
8530 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
8531 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
8532 ; AVX2-ONLY-NEXT: vbroadcastsd 464(%rax), %ymm5
8533 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
8534 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8535 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8536 ; AVX2-ONLY-NEXT: vbroadcastsd 472(%rdx), %ymm1
8537 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8538 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8539 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
8540 ; AVX2-ONLY-NEXT: vbroadcastsd 472(%r10), %ymm1
8541 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
8542 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8543 ; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %ymm0
8544 ; AVX2-ONLY-NEXT: vmovaps 480(%rsi), %ymm1
8545 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
8546 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8547 ; AVX2-ONLY-NEXT: vbroadcastsd 496(%rcx), %ymm3
8548 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
8549 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8550 ; AVX2-ONLY-NEXT: vmovaps 480(%r8), %ymm3
8551 ; AVX2-ONLY-NEXT: vmovaps 480(%r9), %ymm4
8552 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
8553 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],mem[2,3]
8554 ; AVX2-ONLY-NEXT: vbroadcastsd 496(%rax), %ymm5
8555 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm2[0,1,2,3,4,5],ymm5[6,7]
8556 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
8557 ; AVX2-ONLY-NEXT: vbroadcastsd 504(%rdx), %ymm1
8558 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm0[2,3],ymm1[2,3]
8559 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm4[1],ymm3[3],ymm4[3]
8560 ; AVX2-ONLY-NEXT: vbroadcastsd 504(%r10), %ymm1
8561 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
8562 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rdx
8563 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3,4,5],mem[6,7]
8564 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8565 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5],mem[6,7]
8566 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8567 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1,2,3,4,5],mem[6,7]
8568 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8569 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1,2,3,4,5],mem[6,7]
8570 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8571 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1,2,3,4,5],mem[6,7]
8572 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8573 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1,2,3,4,5],mem[6,7]
8574 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8575 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3,4,5],mem[6,7]
8576 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8577 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8578 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8579 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8580 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8581 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8582 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8583 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8584 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8585 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8586 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8587 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8588 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8589 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8590 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8591 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8592 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
8593 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8594 ; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
8595 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8596 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8597 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8598 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8599 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8600 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8601 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8602 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8603 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8604 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8605 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8606 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8607 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8608 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm0[0,1,2,3,4,5],mem[6,7]
8609 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8610 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm0[0,1,2,3,4,5],mem[6,7]
8611 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8612 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1,2,3,4,5],mem[6,7]
8613 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8614 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3,4,5],mem[6,7]
8615 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8616 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3,4,5],mem[6,7]
8617 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8618 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3,4,5],mem[6,7]
8619 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8620 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1,2,3,4,5],mem[6,7]
8621 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8622 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2,3,4,5],mem[6,7]
8623 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8624 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1,2,3,4,5],mem[6,7]
8625 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8626 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1,2,3,4,5],mem[6,7]
8627 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8628 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3,4,5],mem[6,7]
8629 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8630 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],mem[6,7]
8631 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8632 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8633 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
8634 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
8635 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 4064(%rdx)
8636 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 4032(%rdx)
8637 ; AVX2-ONLY-NEXT: vmovaps %ymm15, 4000(%rdx)
8638 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
8639 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 3968(%rdx)
8640 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3808(%rdx)
8641 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 3776(%rdx)
8642 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8643 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3744(%rdx)
8644 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8645 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3712(%rdx)
8646 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 3552(%rdx)
8647 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 3520(%rdx)
8648 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8649 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3488(%rdx)
8650 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8651 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3456(%rdx)
8652 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 3296(%rdx)
8653 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 3264(%rdx)
8654 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8655 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3232(%rdx)
8656 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8657 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3200(%rdx)
8658 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 3040(%rdx)
8659 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 3008(%rdx)
8660 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8661 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2976(%rdx)
8662 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8663 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2944(%rdx)
8664 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 2784(%rdx)
8665 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 2752(%rdx)
8666 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8667 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2720(%rdx)
8668 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8669 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2688(%rdx)
8670 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 2528(%rdx)
8671 ; AVX2-ONLY-NEXT: vmovaps %ymm13, 2496(%rdx)
8672 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8673 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2464(%rdx)
8674 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8675 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2432(%rdx)
8676 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 2272(%rdx)
8677 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8678 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2240(%rdx)
8679 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8680 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2208(%rdx)
8681 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8682 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2176(%rdx)
8683 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8684 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2016(%rdx)
8685 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8686 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1984(%rdx)
8687 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8688 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1952(%rdx)
8689 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8690 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1920(%rdx)
8691 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8692 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1760(%rdx)
8693 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
8694 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1728(%rdx)
8695 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8696 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1696(%rdx)
8697 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8698 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1664(%rdx)
8699 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8700 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rdx)
8701 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8702 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1472(%rdx)
8703 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8704 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rdx)
8705 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8706 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rdx)
8707 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8708 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1248(%rdx)
8709 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8710 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rdx)
8711 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8712 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rdx)
8713 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8714 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rdx)
8715 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8716 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rdx)
8717 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8718 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rdx)
8719 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8720 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 928(%rdx)
8721 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8722 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rdx)
8723 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8724 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rdx)
8725 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8726 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rdx)
8727 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8728 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rdx)
8729 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8730 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rdx)
8731 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8732 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rdx)
8733 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8734 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rdx)
8735 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8736 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rdx)
8737 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8738 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rdx)
8739 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8740 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rdx)
8741 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8742 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rdx)
8743 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8744 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rdx)
8745 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8746 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rdx)
8747 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8748 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3936(%rdx)
8749 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8750 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3904(%rdx)
8751 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8752 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3872(%rdx)
8753 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8754 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3840(%rdx)
8755 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8756 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3680(%rdx)
8757 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8758 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3648(%rdx)
8759 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8760 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3616(%rdx)
8761 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8762 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3584(%rdx)
8763 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8764 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3424(%rdx)
8765 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8766 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3392(%rdx)
8767 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8768 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3360(%rdx)
8769 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8770 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3328(%rdx)
8771 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8772 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3168(%rdx)
8773 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8774 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3136(%rdx)
8775 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8776 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3104(%rdx)
8777 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8778 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3072(%rdx)
8779 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8780 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2912(%rdx)
8781 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8782 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2880(%rdx)
8783 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8784 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2848(%rdx)
8785 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8786 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2816(%rdx)
8787 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8788 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2656(%rdx)
8789 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8790 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2624(%rdx)
8791 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8792 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2592(%rdx)
8793 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8794 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2560(%rdx)
8795 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8796 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2400(%rdx)
8797 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8798 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2368(%rdx)
8799 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8800 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2336(%rdx)
8801 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8802 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2304(%rdx)
8803 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8804 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2144(%rdx)
8805 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8806 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2112(%rdx)
8807 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8808 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2080(%rdx)
8809 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8810 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2048(%rdx)
8811 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8812 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1888(%rdx)
8813 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8814 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1856(%rdx)
8815 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8816 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1824(%rdx)
8817 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8818 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1792(%rdx)
8819 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8820 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1632(%rdx)
8821 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8822 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1600(%rdx)
8823 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8824 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1568(%rdx)
8825 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8826 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1536(%rdx)
8827 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8828 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rdx)
8829 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8830 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rdx)
8831 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8832 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1312(%rdx)
8833 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8834 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rdx)
8835 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8836 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1120(%rdx)
8837 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8838 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1088(%rdx)
8839 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8840 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rdx)
8841 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8842 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rdx)
8843 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8844 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rdx)
8845 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8846 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rdx)
8847 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8848 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rdx)
8849 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8850 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rdx)
8851 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8852 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rdx)
8853 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8854 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rdx)
8855 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8856 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rdx)
8857 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8858 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rdx)
8859 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8860 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rdx)
8861 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8862 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rdx)
8863 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8864 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rdx)
8865 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8866 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rdx)
8867 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8868 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rdx)
8869 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8870 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rdx)
8871 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8872 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rdx)
8873 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8874 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rdx)
8875 ; AVX2-ONLY-NEXT: addq $3880, %rsp # imm = 0xF28
8876 ; AVX2-ONLY-NEXT: vzeroupper
8877 ; AVX2-ONLY-NEXT: retq
;
; AVX512F-ONLY-SLOW-LABEL: store_i64_stride8_vf64:
8880 ; AVX512F-ONLY-SLOW: # %bb.0:
8881 ; AVX512F-ONLY-SLOW-NEXT: subq $5512, %rsp # imm = 0x1588
8882 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8883 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
8884 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
8885 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
8886 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
8887 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm17
8888 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm19
8889 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
8890 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm5
8891 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm10
8892 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
8893 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm11
8894 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
8895 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm25
8896 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm23
8897 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
8898 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm26
8899 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm24
8900 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r10), %zmm21
8901 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r10), %zmm14
8902 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm27
8903 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm16
8904 ; AVX512F-ONLY-SLOW-NEXT: movb $-64, %r11b
8905 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r11d, %k1
8906 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
8907 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8908 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
8909 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
8910 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
8911 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8912 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
8913 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
8914 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
8915 ; AVX512F-ONLY-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
8916 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
8917 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
8918 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
8919 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
8920 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8921 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
8922 ; AVX512F-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8923 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
8924 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
8925 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
8926 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
8927 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
8928 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
8929 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
8930 ; AVX512F-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
8931 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
8932 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
8933 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
8934 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
8935 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8936 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
8937 ; AVX512F-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8938 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
8939 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29
8940 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8941 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
8942 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
8943 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm12
8944 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
8945 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
8946 ; AVX512F-ONLY-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
8947 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
8948 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
8949 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
8950 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
8951 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8952 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
8953 ; AVX512F-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8954 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm12
8955 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
8956 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
8957 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
8958 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
8959 ; AVX512F-ONLY-SLOW-NEXT: # ymm30 = mem[0,1,2,3,0,1,2,3]
8960 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
8961 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
8962 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
8963 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8964 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
8965 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
8966 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
8967 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
8968 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
8969 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
8970 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
8971 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
8972 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
8973 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
8974 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
8975 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8976 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
8977 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
8978 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
8979 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
8980 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
8981 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
8982 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
8983 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12
8984 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
8985 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
8986 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
8987 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8988 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
8989 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
8990 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10
8991 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
8992 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
8993 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8994 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm10
8995 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8996 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
8997 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11
8998 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
8999 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9000 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
9001 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
9002 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9003 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r10), %zmm10
9004 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
9005 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm14
9006 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
9007 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm6
9008 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
9009 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
9010 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
9011 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
9012 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9013 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
9014 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
9015 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
9016 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
9017 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
9018 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
9019 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
9020 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm4
9021 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm7
9022 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
9023 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
9024 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
9025 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
9026 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9027 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm5
9028 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
9029 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
9030 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
9031 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
9032 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
9033 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
9034 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
9035 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
9036 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
9037 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9038 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
9039 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
9040 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6
9041 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
9042 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
9043 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
9044 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9045 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
9046 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9047 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
9048 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9049 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9050 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
9051 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
9052 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9053 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
9054 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
9055 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
9056 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
9057 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
9058 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
9059 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9060 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r10), %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rax), %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r10), %zmm16
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rax), %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
; AVX512F-ONLY-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
; AVX512F-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm28
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm7
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm7
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm7
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %ymm7
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %ymm23
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %ymm18
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rsi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rdi), %ymm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
; AVX512F-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm2
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %xmm4
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rsi), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %xmm4
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rsi), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %xmm12
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rsi), %xmm12
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %xmm12
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rsi), %xmm13
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %xmm18
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %xmm18
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %xmm25
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %xmm18
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %xmm25
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 3776(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 3712(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 3264(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 3200(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 2752(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 2688(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 2240(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 2176(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1664(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 192(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 4032(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3968(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3904(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3840(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 3648(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 3584(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3520(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 3136(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 3072(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3008(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2944(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2880(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 2624(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2560(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2496(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2432(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 2304(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 2112(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 2048(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1600(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1536(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 1088(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 576(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 64(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512F-ONLY-SLOW-NEXT: addq $5512, %rsp # imm = 0x1588
; AVX512F-ONLY-SLOW-NEXT: vzeroupper
; AVX512F-ONLY-SLOW-NEXT: retq
;
; AVX512F-ONLY-FAST-LABEL: store_i64_stride8_vf64:
; AVX512F-ONLY-FAST: # %bb.0:
; AVX512F-ONLY-FAST-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512F-ONLY-FAST-NEXT: movb $-64, %r11b
; AVX512F-ONLY-FAST-NEXT: kmovw %r11d, %k1
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512F-ONLY-FAST-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512F-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512F-ONLY-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # ymm30 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r10), %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rax), %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r10), %zmm16
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rax), %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm19
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
; AVX512F-ONLY-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
; AVX512F-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm25
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm26
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm29
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm24
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm21
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm28
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm20
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm7
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %ymm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm7
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm7
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %ymm7
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %ymm23
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %ymm18
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rcx), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rsi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rdi), %ymm10
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
; AVX512F-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm1
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm2
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %xmm1
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rsi), %xmm1
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %xmm4
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rsi), %xmm1
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %xmm12
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rsi), %xmm12
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %xmm12
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rsi), %xmm13
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %xmm18
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
10725 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %xmm18
10726 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
10727 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %xmm25
10728 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
10729 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
10730 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
10731 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10732 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
10733 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
10734 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
10735 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10736 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
10737 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %xmm18
10738 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
10739 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %xmm25
10740 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
10741 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
10742 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
10743 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10744 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
10745 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
10746 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
10747 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
10748 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 3776(%rax)
10749 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 3712(%rax)
10750 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 3264(%rax)
10751 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 3200(%rax)
10752 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 2752(%rax)
10753 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 2688(%rax)
10754 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 2240(%rax)
10755 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 2176(%rax)
10756 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
10757 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10758 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1664(%rax)
10759 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10760 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1216(%rax)
10761 ; AVX512F-ONLY-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
10762 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1152(%rax)
10763 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10764 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 704(%rax)
10765 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10766 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 640(%rax)
10767 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10768 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 192(%rax)
10769 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10770 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
10771 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10772 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 4032(%rax)
10773 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10774 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3968(%rax)
10775 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10776 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3904(%rax)
10777 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10778 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3840(%rax)
10779 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 3648(%rax)
10780 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 3584(%rax)
10781 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10782 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3520(%rax)
10783 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10784 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
10785 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10786 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
10787 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10788 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
10789 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 3136(%rax)
10790 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 3072(%rax)
10791 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10792 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3008(%rax)
10793 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10794 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2944(%rax)
10795 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10796 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2880(%rax)
10797 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10798 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2816(%rax)
10799 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 2624(%rax)
10800 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 2560(%rax)
10801 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10802 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2496(%rax)
10803 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10804 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2432(%rax)
10805 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10806 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2368(%rax)
10807 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10808 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2304(%rax)
10809 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 2112(%rax)
10810 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 2048(%rax)
10811 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10812 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1984(%rax)
10813 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10814 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1920(%rax)
10815 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10816 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1856(%rax)
10817 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10818 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
10819 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 1600(%rax)
10820 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 1536(%rax)
10821 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10822 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1472(%rax)
10823 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10824 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
10825 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10826 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
10827 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10828 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
10829 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
10830 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1024(%rax)
10831 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10832 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 960(%rax)
10833 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10834 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 896(%rax)
10835 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10836 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 832(%rax)
10837 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10838 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 768(%rax)
10839 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 576(%rax)
10840 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 512(%rax)
10841 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10842 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
10843 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10844 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
10845 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10846 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 320(%rax)
10847 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10848 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
10849 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 64(%rax)
10850 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
10851 ; AVX512F-ONLY-FAST-NEXT: addq $5512, %rsp # imm = 0x1588
10852 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
10853 ; AVX512F-ONLY-FAST-NEXT: retq
10854 ;
10855 ; AVX512DQ-SLOW-LABEL: store_i64_stride8_vf64:
10856 ; AVX512DQ-SLOW: # %bb.0:
10857 ; AVX512DQ-SLOW-NEXT: subq $5512, %rsp # imm = 0x1588
10858 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
10859 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
10860 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
10861 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
10862 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
10863 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm17
10864 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm19
10865 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
10866 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm5
10867 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm10
10868 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
10869 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm11
10870 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
10871 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm25
10872 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r8), %zmm23
10873 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
10874 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm26
10875 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r9), %zmm24
10876 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r10), %zmm21
10877 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r10), %zmm14
10878 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rax), %zmm27
10879 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rax), %zmm16
10880 ; AVX512DQ-SLOW-NEXT: movb $-64, %r11b
10881 ; AVX512DQ-SLOW-NEXT: kmovw %r11d, %k1
10882 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
10883 ; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10884 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
10885 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
10886 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
10887 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
10888 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
10889 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
10890 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
10891 ; AVX512DQ-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
10892 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
10893 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
10894 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
10895 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
10896 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10897 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
10898 ; AVX512DQ-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10899 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
10900 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
10901 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
10902 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
10903 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
10904 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
10905 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
10906 ; AVX512DQ-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
10907 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
10908 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
10909 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
10910 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
10911 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10912 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
10913 ; AVX512DQ-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10914 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
10915 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29
10916 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10917 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
10918 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
10919 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm12
10920 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
10921 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
10922 ; AVX512DQ-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
10923 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
10924 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
10925 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
10926 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
10927 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10928 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
10929 ; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10930 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm12
10931 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
10932 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
10933 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
10934 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
10935 ; AVX512DQ-SLOW-NEXT: # ymm30 = mem[0,1,0,1]
10936 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
10937 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
10938 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
10939 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10940 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
10941 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
10942 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
10943 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
10944 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
10945 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
10946 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
10947 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
10948 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
10949 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
10950 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
10951 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10952 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
10953 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
10954 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
10955 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
10956 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
10957 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
10958 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
10959 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12
10960 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
10961 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
10962 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
10963 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10964 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
10965 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
10966 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10
10967 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
10968 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
10969 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10970 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm10
10971 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10972 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
10973 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11
10974 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
10975 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10976 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
10977 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
10978 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10979 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r10), %zmm10
10980 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
10981 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rax), %zmm14
10982 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
10983 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm6
10984 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
10985 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
10986 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10987 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
10988 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10989 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
10990 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
10991 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
10992 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
10993 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
10994 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
10995 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
10996 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm4
10997 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm7
10998 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
10999 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
11000 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
11001 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
11002 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11003 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm5
11004 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
11005 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
11006 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
11007 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
11008 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
11009 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
11010 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
11011 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
11012 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
11013 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11014 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
11015 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
11016 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6
11017 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
11018 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
11019 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
11020 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11021 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
11022 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11023 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
11024 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11025 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11026 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
11027 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
11028 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11029 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
11030 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
11031 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
11032 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
11033 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
11034 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
11035 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11036 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r10), %zmm10
11037 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rax), %zmm26
11038 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
11039 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
11040 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %zmm23
11041 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r9), %zmm29
11042 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
11043 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
11044 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm2
11045 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm5
11046 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
11047 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
11048 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm3
11049 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm7
11050 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
11051 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
11052 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
11053 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
11054 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11055 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
11056 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
11057 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
11058 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
11059 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
11060 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
11061 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
11062 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
11063 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
11064 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
11065 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11066 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
11067 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
11068 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
11069 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
11070 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
11071 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
11072 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
11073 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11074 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
11075 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
11076 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11077 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
11078 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
11079 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
11080 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
11081 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
11082 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
11083 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
11084 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11085 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r10), %zmm10
11086 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rax), %zmm22
11087 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
11088 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
11089 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r8), %zmm0
11090 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r9), %zmm24
11091 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
11092 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm17
11093 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
11094 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm2
11095 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm5
11096 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
11097 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
11098 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm3
11099 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm7
11100 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
11101 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
11102 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
11103 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
11104 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11105 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
11106 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
11107 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
11108 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
11109 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
11110 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
11111 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
11112 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
11113 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
11114 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
11115 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11116 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
11117 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
11118 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
11119 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
11120 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
11121 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11122 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm6
11123 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
11124 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11125 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
11126 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
11127 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11128 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
11129 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
11130 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
11131 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm3
11132 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
11133 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
11134 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
11135 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11136 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm2
11137 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm3
11138 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
11139 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
11140 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm14
11141 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm6
11142 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
11143 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
11144 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
11145 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
11146 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
11147 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
11148 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
11149 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
11150 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm0
11151 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm6
11152 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
11153 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
11154 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11155 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
11156 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
11157 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11158 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
11159 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
11160 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11161 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
11162 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11163 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm0
11164 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm6
11165 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
11166 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11167 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
11168 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11169 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
11170 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11171 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
11172 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11173 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r10), %zmm31
11174 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rax), %zmm12
11175 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
11176 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
11177 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r8), %zmm17
11178 ; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r9), %zmm4
11179 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
11180 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
11181 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
11182 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11183 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
11184 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
11185 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
11186 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
11187 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
11188 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
11189 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
11190 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
11191 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11192 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
11193 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
11194 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
11195 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm5
11196 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
11197 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
11198 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
11199 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11200 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
11201 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
11202 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
11203 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
11204 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
11205 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
11206 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11207 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm0
11208 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm1
11209 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
11210 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
11211 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
11212 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11213 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
11214 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
11215 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11216 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
11217 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
11218 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11219 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
11220 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11221 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r10), %zmm11
11222 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rax), %zmm20
11223 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
11224 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
11225 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11226 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
11227 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
11228 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11229 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r8), %zmm30
11230 ; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r9), %zmm0
11231 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
11232 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
11233 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11234 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r10), %zmm16
11235 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rax), %zmm8
11236 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
11237 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
11238 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11239 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm3
11240 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm1
11241 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
11242 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11243 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm2
11244 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
11245 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11246 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
11247 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11248 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r8), %zmm19
11249 ; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
11250 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm2
11251 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
11252 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11253 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
11254 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11255 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
11256 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11257 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
11258 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
11259 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5
11260 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm1
11261 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
11262 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11263 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
11264 ; AVX512DQ-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
11265 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
11266 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
11267 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11268 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
11269 ; AVX512DQ-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
11270 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
11271 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
11272 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11273 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
11274 ; AVX512DQ-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
11275 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
11276 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
11277 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11278 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
11279 ; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
11280 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
11281 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11282 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11283 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11284 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
11285 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11286 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11287 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
11288 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11289 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm25
11290 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
11291 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
11292 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11293 ; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
11294 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
11295 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11296 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
11297 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11298 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
11299 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
11300 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11301 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
11302 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
11303 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11304 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
11305 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
11306 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
11307 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
11308 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11309 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
11310 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11311 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
11312 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
11313 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11314 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18
11315 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
11316 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
11317 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11318 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11319 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11320 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
11321 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
11322 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11323 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11324 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
11325 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11326 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11327 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
11328 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11329 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
11330 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11331 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
11332 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
11333 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11334 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
11335 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11336 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
11337 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
11338 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11339 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm13
11340 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
11341 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
11342 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11343 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11344 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11345 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
11346 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11347 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11348 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
11349 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11350 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11351 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
11352 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11353 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
11354 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11355 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
11356 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
11357 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11358 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
11359 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
11360 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11361 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm10
11362 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
11363 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
11364 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11365 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
11366 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
11367 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
11368 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11369 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
11370 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
11371 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11372 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
11373 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
11374 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11375 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
11376 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11377 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11378 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
11379 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11380 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
11381 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
11382 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11383 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm26
11384 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
11385 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
11386 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11387 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
11388 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
11389 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11390 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
11391 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
11392 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11393 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
11394 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
11395 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11396 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
11397 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
11398 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
11399 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
11400 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
11401 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11402 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm24
11403 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
11404 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
11405 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
11406 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
11407 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11408 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
11409 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
11410 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11411 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
11412 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
11413 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
11414 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm27
11415 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm22
11416 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
11417 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
11418 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm21
11419 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
11420 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm28
11421 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
11422 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm20
11423 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
11424 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
11425 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
11426 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
11427 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
11428 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
11429 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11430 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
11431 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
11432 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11433 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
11434 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
11435 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
11436 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
11437 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm12
11438 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
11439 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
11440 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
11441 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
11442 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
11443 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
11444 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
11445 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
11446 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11447 ; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
11448 ; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
11449 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
11450 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
11451 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
11452 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11453 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11454 ; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
11455 ; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm7
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdi), %ymm7
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdi), %ymm7
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdi), %ymm7
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdi), %ymm23
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdi), %ymm18
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rcx), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rdx), %ymm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rsi), %ymm3
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rdi), %ymm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
; AVX512DQ-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512DQ-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm2
; AVX512DQ-SLOW-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %xmm4
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rsi), %xmm1
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdi), %xmm4
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rsi), %xmm1
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdi), %xmm12
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rsi), %xmm12
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdi), %xmm12
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rsi), %xmm13
; AVX512DQ-SLOW-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdi), %xmm18
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rsi), %xmm18
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdi), %xmm25
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rsi), %xmm18
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdi), %xmm25
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 3776(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, 3712(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, 3264(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, 3200(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 2752(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 2688(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 2240(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, 2176(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1664(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512DQ-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 192(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 4032(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3968(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3904(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3840(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 3648(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 3584(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3520(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, 3136(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 3072(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3008(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2944(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2880(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 2624(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, 2560(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2496(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2432(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2304(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 2112(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 2048(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 1600(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 1536(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 1088(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 576(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 64(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512DQ-SLOW-NEXT: addq $5512, %rsp # imm = 0x1588
; AVX512DQ-SLOW-NEXT: vzeroupper
; AVX512DQ-SLOW-NEXT: retq
;
; AVX512DQ-FAST-LABEL: store_i64_stride8_vf64:
; AVX512DQ-FAST: # %bb.0:
; AVX512DQ-FAST-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512DQ-FAST-NEXT: movb $-64, %r11b
; AVX512DQ-FAST-NEXT: kmovw %r11d, %k1
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512DQ-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512DQ-FAST-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512DQ-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512DQ-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512DQ-FAST-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512DQ-FAST-NEXT: # ymm30 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdx), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rcx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r10), %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rax), %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r8), %zmm30
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r9), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r10), %zmm16
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rax), %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdi), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rsi), %zmm1
; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r8), %zmm19
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r9), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
; AVX512DQ-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
; AVX512DQ-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm25
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm18
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm29
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm24
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
12393 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
12394 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
12395 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12396 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
12397 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
12398 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12399 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
12400 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
12401 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
12402 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm27
12403 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
12404 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
12405 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
12406 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm21
12407 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
12408 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm28
12409 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
12410 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm20
12411 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
12412 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
12413 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
12414 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
12415 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
12416 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
12417 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12418 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
12419 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
12420 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12421 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12422 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
12423 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12424 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
12425 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm12
12426 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
12427 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
12428 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
12429 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
12430 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
12431 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
12432 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
12433 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
12434 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12435 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12436 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12437 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
12438 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
12439 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
12440 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12441 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12442 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12443 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12444 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12445 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
12446 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
12447 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12448 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12449 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12450 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12451 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
12452 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12453 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12454 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12455 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12456 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
12457 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12458 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12459 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12460 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12461 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12462 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
12463 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
12464 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12465 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12466 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
12467 ; AVX512DQ-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
12468 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12469 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
12470 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
12471 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12472 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12473 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12474 ; AVX512DQ-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
12475 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
12476 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12477 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12478 ; AVX512DQ-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12479 ; AVX512DQ-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
12480 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
12481 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12482 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12483 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
12484 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm0
12485 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm1
12486 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12487 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm3
12488 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm7
12489 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
12490 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
12491 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
12492 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12493 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12494 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12495 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
12496 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12497 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
12498 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12499 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
12500 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12501 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12502 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
12503 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
12504 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
12505 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %ymm2
12506 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %ymm3
12507 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12508 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
12509 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
12510 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
12511 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12512 ; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
12513 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12514 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
12515 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12516 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
12517 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12518 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
12519 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12520 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12521 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
12522 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rcx), %ymm0
12523 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdx), %ymm1
12524 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12525 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rsi), %ymm3
12526 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdi), %ymm7
12527 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
12528 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
12529 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
12530 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
12531 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12532 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12533 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
12534 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12535 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
12536 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12537 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
12538 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12539 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12540 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
12541 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rcx), %ymm0
12542 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdx), %ymm1
12543 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12544 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rsi), %ymm3
12545 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdi), %ymm7
12546 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
12547 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
12548 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
12549 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12550 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12551 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12552 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
12553 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12554 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
12555 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12556 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
12557 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12558 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
12559 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rcx), %ymm0
12560 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdx), %ymm1
12561 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12562 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rsi), %ymm3
12563 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdi), %ymm7
12564 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
12565 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
12566 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
12567 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12568 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
12569 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12570 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
12571 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12572 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
12573 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12574 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
12575 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rcx), %ymm0
12576 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rdx), %ymm1
12577 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12578 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rsi), %ymm3
12579 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdi), %ymm23
12580 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
12581 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
12582 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
12583 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
12584 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12585 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
12586 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12587 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
12588 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
12589 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rcx), %ymm0
12590 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rdx), %ymm1
12591 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12592 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rsi), %ymm3
12593 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdi), %ymm18
12594 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
12595 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
12596 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
12597 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
12598 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12599 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
12600 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12601 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
12602 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
12603 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rcx), %ymm0
12604 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rdx), %ymm1
12605 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12606 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rsi), %ymm3
12607 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rdi), %ymm10
12608 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
12609 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
12610 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
12611 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
12612 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
12613 ; AVX512DQ-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
12614 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
12615 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
12616 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12617 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12618 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
12619 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm1
12620 ; AVX512DQ-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
12621 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm2
12622 ; AVX512DQ-FAST-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
12623 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
12624 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
12625 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12626 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
12627 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
12628 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
12629 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
12630 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12631 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
12632 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
12633 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %xmm1
12634 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
12635 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
12636 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
12637 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
12638 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
12639 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
12640 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
12641 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
12642 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
12643 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
12644 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12645 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
12646 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
12647 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rsi), %xmm1
12648 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
12649 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdi), %xmm4
12650 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
12651 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
12652 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
12653 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
12654 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
12655 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
12656 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
12657 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
12658 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12659 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
12660 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
12661 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rsi), %xmm1
12662 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
12663 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdi), %xmm12
12664 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
12665 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
12666 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
12667 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
12668 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
12669 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
12670 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
12671 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
12672 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
12673 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12674 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
12675 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rsi), %xmm12
12676 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
12677 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdi), %xmm12
12678 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
12679 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
12680 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
12681 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
12682 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12683 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
12684 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
12685 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
12686 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12687 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
12688 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rsi), %xmm13
12689 ; AVX512DQ-FAST-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
12690 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdi), %xmm18
12691 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
12692 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
12693 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
12694 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12695 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
12696 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
12697 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
12698 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
12699 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12700 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
12701 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rsi), %xmm18
12702 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
12703 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdi), %xmm25
12704 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
12705 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
12706 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
12707 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12708 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
12709 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
12710 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
12711 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12712 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
12713 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rsi), %xmm18
12714 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
12715 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdi), %xmm25
12716 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
12717 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
12718 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
12719 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12720 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
12721 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
12722 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
12723 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12724 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 3776(%rax)
12725 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, 3712(%rax)
12726 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, 3264(%rax)
12727 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, 3200(%rax)
12728 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 2752(%rax)
12729 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, 2688(%rax)
12730 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 2240(%rax)
12731 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, 2176(%rax)
12732 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
12733 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12734 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1664(%rax)
12735 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12736 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1216(%rax)
12737 ; AVX512DQ-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
12738 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1152(%rax)
12739 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12740 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 704(%rax)
12741 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12742 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 640(%rax)
12743 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12744 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 192(%rax)
12745 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12746 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 128(%rax)
12747 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12748 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 4032(%rax)
12749 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12750 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3968(%rax)
12751 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12752 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3904(%rax)
12753 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12754 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3840(%rax)
12755 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 3648(%rax)
12756 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 3584(%rax)
12757 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12758 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3520(%rax)
12759 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12760 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
12761 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12762 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
12763 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12764 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
12765 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, 3136(%rax)
12766 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 3072(%rax)
12767 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12768 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3008(%rax)
12769 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12770 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2944(%rax)
12771 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12772 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2880(%rax)
12773 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12774 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2816(%rax)
12775 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 2624(%rax)
12776 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, 2560(%rax)
12777 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12778 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2496(%rax)
12779 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12780 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2432(%rax)
12781 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12782 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2368(%rax)
12783 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12784 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2304(%rax)
12785 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, 2112(%rax)
12786 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 2048(%rax)
12787 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12788 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1984(%rax)
12789 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12790 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1920(%rax)
12791 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12792 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1856(%rax)
12793 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12794 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
12795 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 1600(%rax)
12796 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, 1536(%rax)
12797 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12798 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1472(%rax)
12799 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12800 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
12801 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12802 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
12803 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12804 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
12805 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
12806 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 1024(%rax)
12807 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12808 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 960(%rax)
12809 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12810 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 896(%rax)
12811 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12812 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 832(%rax)
12813 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12814 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 768(%rax)
12815 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, 576(%rax)
12816 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 512(%rax)
12817 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12818 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 448(%rax)
12819 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12820 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 384(%rax)
12821 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12822 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 320(%rax)
12823 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12824 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 256(%rax)
12825 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 64(%rax)
12826 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
12827 ; AVX512DQ-FAST-NEXT: addq $5512, %rsp # imm = 0x1588
12828 ; AVX512DQ-FAST-NEXT: vzeroupper
12829 ; AVX512DQ-FAST-NEXT: retq
;
; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride8_vf64:
; AVX512BW-ONLY-SLOW: # %bb.0:
; AVX512BW-ONLY-SLOW-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512BW-ONLY-SLOW-NEXT: movb $-64, %r11b
; AVX512BW-ONLY-SLOW-NEXT: kmovd %r11d, %k1
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512BW-ONLY-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512BW-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512BW-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # ymm30 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
13187 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
13188 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13189 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
13190 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
13191 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13192 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
13193 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
13194 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13195 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
13196 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13197 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r10), %zmm11
13198 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rax), %zmm20
13199 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
13200 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
13201 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13202 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
13203 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
13204 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13205 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm30
13206 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm0
13207 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
13208 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
13209 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13210 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r10), %zmm16
13211 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rax), %zmm8
13212 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
13213 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
13214 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13215 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm3
13216 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm1
13217 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
13218 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13219 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm2
13220 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
13221 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13222 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
13223 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13224 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm19
13225 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
13226 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm2
13227 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
13228 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13229 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
13230 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13231 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
13232 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13233 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
13234 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
13235 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5
13236 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm1
13237 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
13238 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13239 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
13240 ; AVX512BW-ONLY-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
13241 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
13242 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
13243 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13244 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
13245 ; AVX512BW-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
13246 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
13247 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
13248 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13249 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
13250 ; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
13251 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
13252 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
13253 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13254 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
13255 ; AVX512BW-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
13256 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
13257 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13258 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13259 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13260 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
13261 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13262 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13263 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
13264 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13265 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm25
13266 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
13267 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
13268 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13269 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
13270 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
13271 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
13272 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
13273 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13274 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
13275 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
13276 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13277 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
13278 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
13279 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13280 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
13281 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
13282 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
13283 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
13284 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13285 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
13286 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13287 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
13288 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
13289 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13290 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18
13291 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
13292 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
13293 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13294 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13295 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13296 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
13297 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
13298 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13299 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13300 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
13301 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13302 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13303 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
13304 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13305 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
13306 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13307 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
13308 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
13309 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13310 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
13311 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13312 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
13313 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
13314 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13315 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm13
13316 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
13317 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
13318 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13319 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13320 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13321 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
13322 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13323 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13324 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
13325 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13326 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13327 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
13328 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13329 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
13330 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13331 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
13332 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
13333 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13334 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
13335 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
13336 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13337 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm10
13338 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
13339 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
13340 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13341 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
13342 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
13343 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
13344 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13345 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
13346 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
13347 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13348 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
13349 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
13350 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13351 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
13352 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13353 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13354 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
13355 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13356 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
13357 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
13358 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13359 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm26
13360 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
13361 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
13362 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13363 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
13364 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
13365 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13366 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
13367 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
13368 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13369 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
13370 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
13371 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13372 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
13373 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
13374 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
13375 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
13376 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
13377 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13378 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm24
13379 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
13380 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
13381 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
13382 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
13383 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13384 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
13385 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
13386 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13387 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
13388 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
13389 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
13390 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm27
13391 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm22
13392 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
13393 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
13394 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm21
13395 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
13396 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm28
13397 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
13398 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm20
13399 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
13400 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
13401 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
13402 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
13403 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
13404 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
13405 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13406 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
13407 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
13408 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13409 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13410 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
13411 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13412 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
13413 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm12
13414 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
13415 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
13416 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
13417 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
13418 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
13419 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
13420 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
13421 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
13422 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13423 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13424 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13425 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
13426 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
13427 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
13428 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13429 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13430 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13431 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13432 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13433 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
13434 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
13435 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13436 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13437 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13438 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13439 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
13440 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13441 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13442 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13443 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13444 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
13445 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13446 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13447 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13448 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13449 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13450 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
13451 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
13452 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13453 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13454 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
13455 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
13456 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13457 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
13458 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
13459 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13460 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13461 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13462 ; AVX512BW-ONLY-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
13463 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
13464 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13465 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13466 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
13467 ; AVX512BW-ONLY-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
13468 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
13469 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13470 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13471 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
13472 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm0
13473 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm1
13474 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13475 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm3
13476 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm7
13477 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
13478 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
13479 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
13480 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13481 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13482 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13483 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
13484 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13485 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
13486 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13487 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
13488 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13489 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13490 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
13491 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %ymm0
13492 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %ymm1
13493 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
13494 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
13495 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13496 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
13497 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
13498 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
13499 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13500 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
13501 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13502 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
13503 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13504 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
13505 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13506 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
13507 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13508 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13509 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
13510 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rcx), %ymm0
13511 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %ymm1
13512 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13513 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rsi), %ymm3
13514 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm7
13515 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
13516 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
13517 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
13518 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
13519 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13520 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13521 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
13522 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13523 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
13524 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13525 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
13526 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13527 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13528 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
13529 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rcx), %ymm0
13530 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %ymm1
13531 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13532 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rsi), %ymm3
13533 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm7
13534 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
13535 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
13536 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
13537 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13538 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13539 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13540 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
13541 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13542 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
13543 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13544 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
13545 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13546 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
13547 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rcx), %ymm0
13548 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %ymm1
13549 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13550 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rsi), %ymm3
13551 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %ymm7
13552 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
13553 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
13554 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
13555 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13556 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
13557 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13558 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
13559 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13560 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
13561 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13562 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
13563 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rcx), %ymm0
13564 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %ymm1
13565 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13566 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rsi), %ymm3
13567 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %ymm23
13568 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
13569 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
13570 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
13571 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
13572 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13573 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
13574 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13575 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
13576 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
13577 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rcx), %ymm0
13578 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %ymm1
13579 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13580 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rsi), %ymm3
13581 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %ymm18
13582 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
13583 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
13584 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
13585 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
13586 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13587 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
13588 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13589 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
13590 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
13591 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rcx), %ymm0
13592 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %ymm1
13593 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
13594 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rsi), %ymm3
13595 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rdi), %ymm10
13596 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
13597 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
13598 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
13599 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
13600 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
13601 ; AVX512BW-ONLY-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
13602 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
13603 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
13604 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13605 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13606 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
13607 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm1
13608 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
13609 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm2
13610 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
13611 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
13612 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
13613 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13614 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
13615 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
13616 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
13617 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
13618 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13619 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
13620 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
13621 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
13622 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
13623 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %xmm4
13624 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
13625 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
13626 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
13627 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
13628 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13629 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
13630 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
13631 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
13632 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13633 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
13634 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
13635 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rsi), %xmm1
13636 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
13637 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %xmm4
13638 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
13639 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
13640 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
13641 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
13642 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
13643 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
13644 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
13645 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
13646 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13647 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
13648 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
13649 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rsi), %xmm1
13650 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
13651 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %xmm12
13652 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
13653 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
13654 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
13655 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
13656 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
13657 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
13658 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
13659 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
13660 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13661 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13662 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
13663 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rsi), %xmm12
13664 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
13665 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %xmm12
13666 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
13667 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
13668 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
13669 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
13670 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13671 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
13672 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
13673 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
13674 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13675 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
13676 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rsi), %xmm13
13677 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
13678 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %xmm18
13679 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
13680 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
13681 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
13682 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13683 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
13684 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
13685 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
13686 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
13687 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13688 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
13689 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %xmm18
13690 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
13691 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %xmm25
13692 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
13693 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
13694 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
13695 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13696 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
13697 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
13698 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
13699 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13700 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
13701 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %xmm18
13702 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
13703 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %xmm25
13704 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
13705 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
13706 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
13707 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13708 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
13709 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
13710 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
13711 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
13712 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 3776(%rax)
13713 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 3712(%rax)
13714 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 3264(%rax)
13715 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 3200(%rax)
13716 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 2752(%rax)
13717 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 2688(%rax)
13718 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 2240(%rax)
13719 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 2176(%rax)
13720 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
13721 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13722 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1664(%rax)
13723 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13724 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1216(%rax)
13725 ; AVX512BW-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
13726 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
13727 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13728 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
13729 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13730 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 640(%rax)
13731 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13732 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 192(%rax)
13733 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13734 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
13735 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13736 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 4032(%rax)
13737 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13738 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3968(%rax)
13739 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13740 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3904(%rax)
13741 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13742 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3840(%rax)
13743 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 3648(%rax)
13744 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 3584(%rax)
13745 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13746 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3520(%rax)
13747 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13748 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
13749 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13750 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
13751 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13752 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
13753 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 3136(%rax)
13754 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 3072(%rax)
13755 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13756 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3008(%rax)
13757 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13758 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2944(%rax)
13759 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13760 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2880(%rax)
13761 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13762 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
13763 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 2624(%rax)
13764 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2560(%rax)
13765 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13766 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2496(%rax)
13767 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13768 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2432(%rax)
13769 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13770 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
13771 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13772 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 2304(%rax)
13773 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 2112(%rax)
13774 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 2048(%rax)
13775 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13776 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1984(%rax)
13777 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13778 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
13779 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13780 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
13781 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13782 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
13783 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1600(%rax)
13784 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1536(%rax)
13785 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13786 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
13787 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13788 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
13789 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13790 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
13791 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13792 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
13793 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 1088(%rax)
13794 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
13795 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13796 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
13797 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13798 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
13799 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13800 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
13801 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13802 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
13803 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 576(%rax)
13804 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 512(%rax)
13805 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13806 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
13807 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13808 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
13809 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13810 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
13811 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13812 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
13813 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 64(%rax)
13814 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, (%rax)
13815 ; AVX512BW-ONLY-SLOW-NEXT: addq $5512, %rsp # imm = 0x1588
13816 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
13817 ; AVX512BW-ONLY-SLOW-NEXT: retq
; AVX512BW-ONLY-FAST-LABEL: store_i64_stride8_vf64:
; AVX512BW-ONLY-FAST: # %bb.0:
; AVX512BW-ONLY-FAST-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512BW-ONLY-FAST-NEXT: movb $-64, %r11b
; AVX512BW-ONLY-FAST-NEXT: kmovd %r11d, %k1
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512BW-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512BW-ONLY-FAST-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512BW-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512BW-ONLY-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512BW-ONLY-FAST-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # ymm30 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
13954 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
13955 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
13956 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
13957 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
13958 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
13959 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm11
13960 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm4
13961 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm7
13962 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
13963 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
13964 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
13965 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
13966 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13967 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm5
13968 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
13969 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
13970 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
13971 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
13972 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
13973 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
13974 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
13975 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
13976 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
13977 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13978 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
13979 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
13980 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6
13981 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
13982 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
13983 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
13984 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13985 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
13986 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13987 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
13988 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13989 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13990 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
13991 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
13992 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13993 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
13994 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
13995 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
13996 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
13997 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
13998 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
13999 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14000 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r10), %zmm10
14001 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm26
14002 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
14003 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
14004 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm23
14005 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm29
14006 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
14007 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14008 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
14009 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm5
14010 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
14011 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
14012 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm3
14013 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm7
14014 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
14015 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
14016 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
14017 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
14018 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14019 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
14020 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
14021 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
14022 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
14023 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
14024 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
14025 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
14026 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
14027 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
14028 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
14029 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14030 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
14031 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
14032 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
14033 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
14034 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
14035 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
14036 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
14037 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14038 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
14039 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
14040 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14041 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
14042 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
14043 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
14044 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
14045 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
14046 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
14047 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
14048 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14049 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r10), %zmm10
14050 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rax), %zmm22
14051 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
14052 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
14053 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm0
14054 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm24
14055 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
14056 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
14057 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14058 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm2
14059 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm5
14060 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
14061 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
14062 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm3
14063 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm7
14064 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
14065 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
14066 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
14067 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
14068 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14069 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
14070 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
14071 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
14072 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
14073 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
14074 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
14075 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
14076 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
14077 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
14078 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
14079 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14080 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
14081 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
14082 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
14083 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
14084 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
14085 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14086 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm6
14087 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
14088 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14089 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
14090 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
14091 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14092 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
14093 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
14094 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
14095 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
14096 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
14097 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
14098 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
14099 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14100 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm2
14101 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm3
14102 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
14103 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
14104 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm14
14105 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm6
14106 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
14107 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
14108 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
14109 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
14110 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
14111 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
14112 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
14113 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
14114 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm0
14115 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm6
14116 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
14117 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
14118 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14119 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
14120 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
14121 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14122 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
14123 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
14124 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14125 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
14126 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14127 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm0
14128 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm6
14129 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
14130 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14131 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
14132 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14133 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
14134 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14135 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
14136 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14137 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r10), %zmm31
14138 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rax), %zmm12
14139 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
14140 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
14141 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm17
14142 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm4
14143 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
14144 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
14145 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
14146 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14147 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
14148 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
14149 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
14150 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
14151 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
14152 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
14153 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
14154 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
14155 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14156 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
14157 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
14158 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
14159 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm5
14160 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
14161 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
14162 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
14163 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14164 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
14165 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
14166 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
14167 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
14168 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
14169 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
14170 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14171 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm0
14172 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm1
14173 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
14174 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
14175 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
14176 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14177 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
14178 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
14179 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14180 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
14181 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
14182 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14183 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
14184 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14185 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r10), %zmm11
14186 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rax), %zmm20
14187 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
14188 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
14189 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14190 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
14191 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
14192 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14193 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm30
14194 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm0
14195 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
14196 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
14197 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14198 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r10), %zmm16
14199 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rax), %zmm8
14200 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm1
14201 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
14202 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14203 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm3
14204 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm1
14205 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
14206 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14207 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm2
14208 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
14209 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14210 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
14211 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14212 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm19
14213 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm7
14214 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm2
14215 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
14216 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14217 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
14218 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14219 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
14220 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14221 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
14222 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
14223 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm5
14224 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
14225 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
14226 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14227 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
14228 ; AVX512BW-ONLY-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14229 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
14230 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
14231 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14232 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
14233 ; AVX512BW-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14234 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
14235 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
14236 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14237 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
14238 ; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14239 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
14240 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
14241 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14242 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
14243 ; AVX512BW-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14244 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
14245 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14246 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14247 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14248 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
14249 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14250 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14251 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
14252 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14253 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm25
14254 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
14255 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
14256 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14257 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
14258 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
14259 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14260 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
14261 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14262 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
14263 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
14264 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14265 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
14266 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
14267 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14268 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
14269 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
14270 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14271 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
14272 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14273 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
14274 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14275 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
14276 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
14277 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14278 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm18
14279 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
14280 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
14281 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14282 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14283 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14284 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14285 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
14286 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14287 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14288 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
14289 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14290 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14291 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
14292 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14293 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
14294 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14295 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14296 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
14297 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14298 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
14299 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14300 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
14301 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
14302 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14303 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm13
14304 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
14305 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
14306 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14307 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14308 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14309 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
14310 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14311 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14312 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
14313 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14314 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14315 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
14316 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14317 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
14318 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14319 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
14320 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
14321 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14322 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
14323 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
14324 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14325 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm10
14326 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
14327 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
14328 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14329 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
14330 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
14331 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
14332 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14333 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
14334 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
14335 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14336 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
14337 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
14338 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14339 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
14340 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14341 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14342 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
14343 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14344 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
14345 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
14346 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14347 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm26
14348 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
14349 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
14350 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14351 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
14352 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
14353 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14354 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
14355 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
14356 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14357 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
14358 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
14359 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14360 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
14361 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm29
14362 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
14363 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
14364 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
14365 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14366 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm24
14367 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
14368 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
14369 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
14370 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
14371 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14372 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
14373 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
14374 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14375 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14376 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
14377 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
14378 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm27
14379 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
14380 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
14381 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
14382 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm21
14383 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
14384 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm28
14385 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
14386 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm20
14387 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
14388 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
14389 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
14390 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
14391 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
14392 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
14393 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14394 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
14395 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
14396 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14397 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14398 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
14399 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14400 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
14401 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm12
14402 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
14403 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
14404 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
14405 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
14406 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
14407 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
14408 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
14409 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
14410 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14411 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14412 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14413 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14414 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
14415 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
14416 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14417 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14418 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14419 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14420 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14421 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
14422 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
14423 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14424 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14425 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14426 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14427 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
14428 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14429 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14430 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14431 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14432 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
14433 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14434 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14435 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14436 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14437 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14438 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
14439 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
14440 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14441 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14442 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
14443 ; AVX512BW-ONLY-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
14444 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14445 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
14446 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
14447 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14448 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14449 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14450 ; AVX512BW-ONLY-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
14451 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
14452 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14453 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14454 ; AVX512BW-ONLY-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
14455 ; AVX512BW-ONLY-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
14456 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
14457 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14458 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14459 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
14460 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm0
14461 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm1
14462 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14463 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm3
14464 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm7
14465 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
14466 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
14467 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
14468 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14469 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14470 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14471 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14472 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14473 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
14474 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14475 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
14476 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14477 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14478 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
14479 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
14480 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
14481 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %ymm2
14482 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm3
14483 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14484 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
14485 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
14486 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
14487 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14488 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
14489 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14490 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
14491 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14492 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
14493 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14494 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
14495 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14496 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14497 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
14498 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rcx), %ymm0
14499 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %ymm1
14500 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14501 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rsi), %ymm3
14502 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm7
14503 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
14504 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
14505 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
14506 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
14507 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14508 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14509 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14510 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14511 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
14512 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14513 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
14514 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14515 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14516 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
14517 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rcx), %ymm0
14518 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %ymm1
14519 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14520 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rsi), %ymm3
14521 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm7
14522 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
14523 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
14524 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
14525 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14526 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14527 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14528 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14529 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14530 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
14531 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14532 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
14533 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14534 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
14535 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rcx), %ymm0
14536 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %ymm1
14537 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14538 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rsi), %ymm3
14539 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %ymm7
14540 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
14541 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
14542 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
14543 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14544 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
14545 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14546 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
14547 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14548 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
14549 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14550 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
14551 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rcx), %ymm0
14552 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %ymm1
14553 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14554 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rsi), %ymm3
14555 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %ymm23
14556 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
14557 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
14558 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
14559 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
14560 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14561 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
14562 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14563 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
14564 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
14565 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rcx), %ymm0
14566 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %ymm1
14567 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14568 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rsi), %ymm3
14569 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %ymm18
14570 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
14571 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
14572 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
14573 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
14574 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14575 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
14576 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14577 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
14578 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
14579 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rcx), %ymm0
14580 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %ymm1
14581 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
14582 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rsi), %ymm3
14583 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rdi), %ymm10
14584 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
14585 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
14586 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
14587 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
14588 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
14589 ; AVX512BW-ONLY-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
14590 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
14591 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
14592 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14593 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14594 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
14595 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm1
14596 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
14597 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm2
14598 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
14599 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
14600 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
14601 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14602 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14603 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
14604 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
14605 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
14606 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14607 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
14608 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
14609 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %xmm1
14610 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
14611 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
14612 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
14613 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
14614 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
14615 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
14616 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14617 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
14618 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
14619 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
14620 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rsi), %xmm1
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %xmm4
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rsi), %xmm1
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %xmm12
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rsi), %xmm12
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %xmm12
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rsi), %xmm13
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %xmm18
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %xmm18
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %xmm25
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %xmm18
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %xmm25
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 3776(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 3712(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 3264(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 3200(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 2752(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 2688(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 2240(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 2176(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1664(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1216(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1152(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 704(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 640(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 192(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 4032(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3968(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3904(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3840(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 3648(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 3584(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3520(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 3136(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 3072(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3008(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2944(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2880(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2816(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 2624(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 2560(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2496(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2432(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2304(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 2112(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 2048(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1984(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1920(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 1600(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 1536(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1024(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 576(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 512(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 64(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-ONLY-FAST-NEXT: addq $5512, %rsp # imm = 0x1588
; AVX512BW-ONLY-FAST-NEXT: vzeroupper
; AVX512BW-ONLY-FAST-NEXT: retq
;
; AVX512DQBW-SLOW-LABEL: store_i64_stride8_vf64:
; AVX512DQBW-SLOW: # %bb.0:
; AVX512DQBW-SLOW-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512DQBW-SLOW-NEXT: movb $-64, %r11b
; AVX512DQBW-SLOW-NEXT: kmovd %r11d, %k1
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512DQBW-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512DQBW-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512DQBW-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512DQBW-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512DQBW-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # ymm30 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r10), %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rax), %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r8), %zmm30
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r9), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r10), %zmm16
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rax), %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm1
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r8), %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
; AVX512DQBW-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
; AVX512DQBW-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm21
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm28
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm20
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rcx), %ymm0
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %ymm1
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rsi), %ymm3
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %ymm7
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rcx), %ymm0
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %ymm1
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rcx), %ymm0
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdx), %ymm1
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rsi), %ymm3
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdi), %ymm7
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rcx), %ymm0
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdx), %ymm1
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rsi), %ymm3
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdi), %ymm7
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
15519 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
15520 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
15521 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15522 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
15523 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rcx), %ymm0
15524 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdx), %ymm1
15525 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
15526 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rsi), %ymm3
15527 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdi), %ymm7
15528 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
15529 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
15530 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
15531 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15532 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
15533 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
15534 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
15535 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
15536 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
15537 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15538 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
15539 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rcx), %ymm0
15540 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rdx), %ymm1
15541 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
15542 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rsi), %ymm3
15543 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdi), %ymm23
15544 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
15545 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
15546 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
15547 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
15548 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
15549 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
15550 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
15551 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
15552 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
15553 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rcx), %ymm0
15554 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rdx), %ymm1
15555 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
15556 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rsi), %ymm3
15557 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdi), %ymm18
15558 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
15559 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
15560 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
15561 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
15562 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
15563 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
15564 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
15565 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
15566 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
15567 ; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rcx), %ymm0
15568 ; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rdx), %ymm1
15569 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
15570 ; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rsi), %ymm3
15571 ; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rdi), %ymm10
15572 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
15573 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
15574 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
15575 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
15576 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
15577 ; AVX512DQBW-SLOW-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
15578 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
15579 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
15580 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15581 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
15582 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
15583 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rsi), %xmm1
15584 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
15585 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %xmm2
15586 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
15587 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
15588 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
15589 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
15590 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
15591 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
15592 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
15593 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
15594 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15595 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
15596 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
15597 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
15598 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
15599 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdi), %xmm4
15600 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
15601 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
15602 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
15603 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
15604 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
15605 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
15606 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
15607 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
15608 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15609 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
15610 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
15611 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rsi), %xmm1
15612 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
15613 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdi), %xmm4
15614 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
15615 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
15616 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
15617 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
15618 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
15619 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
15620 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
15621 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
15622 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
15623 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
15624 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
15625 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rsi), %xmm1
15626 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
15627 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdi), %xmm12
15628 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
15629 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
15630 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
15631 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
15632 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
15633 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
15634 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
15635 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
15636 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
15637 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15638 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
15639 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rsi), %xmm12
15640 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
15641 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdi), %xmm12
15642 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
15643 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
15644 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
15645 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
15646 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15647 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
15648 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
15649 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
15650 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15651 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
15652 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rsi), %xmm13
15653 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
15654 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdi), %xmm18
15655 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
15656 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
15657 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
15658 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15659 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
15660 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
15661 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
15662 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
15663 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15664 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
15665 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rsi), %xmm18
15666 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
15667 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdi), %xmm25
15668 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
15669 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
15670 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
15671 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15672 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
15673 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
15674 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
15675 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15676 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
15677 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rsi), %xmm18
15678 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
15679 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdi), %xmm25
15680 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
15681 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
15682 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
15683 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
15684 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
15685 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
15686 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
15687 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
15688 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 3776(%rax)
15689 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, 3712(%rax)
15690 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, 3264(%rax)
15691 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 3200(%rax)
15692 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, 2752(%rax)
15693 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, 2688(%rax)
15694 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 2240(%rax)
15695 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, 2176(%rax)
15696 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
15697 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15698 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1664(%rax)
15699 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15700 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1216(%rax)
15701 ; AVX512DQBW-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
15702 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
15703 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15704 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
15705 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15706 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 640(%rax)
15707 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15708 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 192(%rax)
15709 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15710 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
15711 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15712 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 4032(%rax)
15713 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15714 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3968(%rax)
15715 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15716 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3904(%rax)
15717 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15718 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3840(%rax)
15719 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 3648(%rax)
15720 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 3584(%rax)
15721 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15722 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3520(%rax)
15723 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15724 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
15725 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15726 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
15727 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15728 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
15729 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, 3136(%rax)
15730 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 3072(%rax)
15731 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15732 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3008(%rax)
15733 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15734 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2944(%rax)
15735 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15736 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2880(%rax)
15737 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15738 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
15739 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 2624(%rax)
15740 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, 2560(%rax)
15741 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15742 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2496(%rax)
15743 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15744 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2432(%rax)
15745 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15746 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
15747 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15748 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2304(%rax)
15749 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, 2112(%rax)
15750 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 2048(%rax)
15751 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15752 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1984(%rax)
15753 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15754 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
15755 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15756 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
15757 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15758 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
15759 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, 1600(%rax)
15760 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, 1536(%rax)
15761 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15762 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
15763 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15764 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
15765 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15766 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
15767 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15768 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
15769 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, 1088(%rax)
15770 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 1024(%rax)
15771 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15772 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
15773 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15774 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
15775 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15776 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
15777 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15778 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
15779 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 576(%rax)
15780 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 512(%rax)
15781 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15782 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
15783 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15784 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
15785 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15786 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
15787 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15788 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
15789 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 64(%rax)
15790 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, (%rax)
15791 ; AVX512DQBW-SLOW-NEXT: addq $5512, %rsp # imm = 0x1588
15792 ; AVX512DQBW-SLOW-NEXT: vzeroupper
15793 ; AVX512DQBW-SLOW-NEXT: retq
;
; AVX512DQBW-FAST-LABEL: store_i64_stride8_vf64:
; AVX512DQBW-FAST: # %bb.0:
; AVX512DQBW-FAST-NEXT: subq $5512, %rsp # imm = 0x1588
; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rsi), %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm25
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm28
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r9), %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r10), %zmm21
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r10), %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rax), %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rax), %zmm16
; AVX512DQBW-FAST-NEXT: movb $-64, %r11b
; AVX512DQBW-FAST-NEXT: kmovd %r11d, %k1
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
; AVX512DQBW-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
; AVX512DQBW-FAST-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm6, %zmm15
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
; AVX512DQBW-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
; AVX512DQBW-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm7, %zmm15
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm13, %zmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm12
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm12
; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
; AVX512DQBW-FAST-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm15
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm12
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm18, %zmm12
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm18, %zmm8
; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
; AVX512DQBW-FAST-NEXT: # ymm30 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm30, %zmm10
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm11
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm9, %zmm8
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm9, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm7, %zmm11
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm13, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm10
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r10), %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rax), %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm6
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm3, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdx), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rcx), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm9, %zmm5
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm13, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm4
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r10), %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rax), %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r8), %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %zmm29
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rsi), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdx), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rcx), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm9, %zmm4
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm18, %zmm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r10), %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rax), %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r8), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r9), %zmm24
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rsi), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdx), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rcx), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm9, %zmm4
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm8
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm6
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm13, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm3
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm18, %zmm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rsi), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm11, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdx), %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rcx), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm7
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdx), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rcx), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm12, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdx), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rcx), %zmm6
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r10), %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rax), %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r8), %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r9), %zmm4
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm10 {%k1}
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm9, %zmm1
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm9, %zmm1
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm1
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdi), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rsi), %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm9, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r10), %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rax), %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm2, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r8), %zmm30
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r9), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r10), %zmm16
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rax), %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdi), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rsi), %zmm1
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm9, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r8), %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r9), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm3, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm18, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
; AVX512DQBW-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm15, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
; AVX512DQBW-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm6, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm21
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm25
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm6, %zmm25
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm1, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm6, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm6, %zmm13
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm6, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm1, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm6, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm1, %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm15, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm1, %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm6, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm1, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm15, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm14, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm31
16337 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm29
16338 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm15, %zmm29
16339 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm2
16340 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm2
16341 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16342 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm24
16343 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm24
16344 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm1, %zmm17
16345 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
16346 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm15, %zmm2
16347 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16348 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
16349 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm14, %zmm2
16350 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16351 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
16352 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
16353 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
16354 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm27
16355 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
16356 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm6, %zmm22
16357 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm11
16358 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm21
16359 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm21
16360 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm28
16361 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm28
16362 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm20
16363 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm20
16364 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
16365 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
16366 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm30
16367 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
16368 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm15, %zmm0
16369 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16370 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm0
16371 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm0
16372 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16373 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16374 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
16375 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16376 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
16377 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm12
16378 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm6, %zmm12
16379 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm16
16380 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm15
16381 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm14
16382 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm7, %zmm19, %zmm6
16383 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
16384 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
16385 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm1, %zmm19
16386 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16387 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16388 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16389 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
16390 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
16391 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm2, %zmm1
16392 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16393 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16394 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16395 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16396 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16397 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
16398 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm3, %zmm1
16399 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16400 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16401 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16402 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16403 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm9, %zmm1
16404 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16405 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16406 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16407 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16408 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm27, %zmm1
16409 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16410 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16411 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16412 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16413 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16414 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
16415 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
16416 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16417 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16418 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
16419 ; AVX512DQBW-FAST-NEXT: # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
16420 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16421 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
16422 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
16423 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16424 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16425 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
16426 ; AVX512DQBW-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
16427 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
16428 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16429 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16430 ; AVX512DQBW-FAST-NEXT: vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
16431 ; AVX512DQBW-FAST-NEXT: # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
16432 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
16433 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16434 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16435 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
16436 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rcx), %ymm0
16437 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %ymm1
16438 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16439 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rsi), %ymm3
16440 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdi), %ymm7
16441 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
16442 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
16443 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm25, %zmm2
16444 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16445 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16446 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16447 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
16448 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16449 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
16450 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16451 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
16452 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16453 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16454 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
16455 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
16456 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
16457 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rsi), %ymm2
16458 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdi), %ymm3
16459 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16460 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
16461 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
16462 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm18, %zmm4
16463 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16464 ; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
16465 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16466 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
16467 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16468 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
16469 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16470 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
16471 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16472 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16473 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
16474 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rcx), %ymm0
16475 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdx), %ymm1
16476 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16477 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rsi), %ymm3
16478 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdi), %ymm7
16479 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
16480 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
16481 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm13, %zmm2
16482 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
16483 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16484 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16485 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
16486 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16487 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
16488 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16489 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
16490 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16491 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16492 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
16493 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rcx), %ymm0
16494 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdx), %ymm1
16495 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16496 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rsi), %ymm3
16497 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdi), %ymm7
16498 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
16499 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
16500 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm10, %zmm2
16501 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16502 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16503 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16504 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
16505 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16506 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
16507 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16508 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm9
16509 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16510 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
16511 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rcx), %ymm0
16512 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdx), %ymm1
16513 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16514 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rsi), %ymm3
16515 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdi), %ymm7
16516 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
16517 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
16518 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm26, %zmm26
16519 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16520 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm2 {%k1}
16521 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16522 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
16523 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16524 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm7
16525 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16526 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k1}
16527 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rcx), %ymm0
16528 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rdx), %ymm1
16529 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16530 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rsi), %ymm3
16531 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdi), %ymm23
16532 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
16533 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
16534 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm24, %zmm8
16535 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
16536 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16537 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
16538 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16539 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm17, %zmm17
16540 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm20 {%k1}
16541 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rcx), %ymm0
16542 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rdx), %ymm1
16543 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16544 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rsi), %ymm3
16545 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdi), %ymm18
16546 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
16547 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
16548 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm20, %zmm20
16549 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm30 {%k1}
16550 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16551 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
16552 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16553 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm30, %zmm24
16554 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm6 {%k1}
16555 ; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rcx), %ymm0
16556 ; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rdx), %ymm1
16557 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
16558 ; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rsi), %ymm3
16559 ; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rdi), %ymm10
16560 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
16561 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
16562 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm31
16563 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
16564 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
16565 ; AVX512DQBW-FAST-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
16566 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm19 {%k1}
16567 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
16568 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16569 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16570 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
16571 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rsi), %xmm1
16572 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
16573 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdi), %xmm2
16574 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, (%rdx), %ymm2, %ymm2
16575 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
16576 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm4, %zmm3
16577 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16578 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
16579 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm10 {%k1}
16580 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
16581 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm10, %zmm2
16582 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16583 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
16584 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm11 {%k1}
16585 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rsi), %xmm1
16586 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 64(%rcx), %ymm1, %ymm1
16587 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
16588 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 64(%rdx), %ymm4, %ymm4
16589 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
16590 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm11, %zmm10
16591 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
16592 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16593 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
16594 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
16595 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm11
16596 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16597 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16598 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm13 {%k1}
16599 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rsi), %xmm1
16600 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 128(%rcx), %ymm1, %ymm1
16601 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdi), %xmm4
16602 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 128(%rdx), %ymm4, %ymm12
16603 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
16604 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm13, %zmm4
16605 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16606 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
16607 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
16608 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
16609 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm19
16610 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16611 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
16612 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
16613 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rsi), %xmm1
16614 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 192(%rcx), %ymm1, %ymm1
16615 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdi), %xmm12
16616 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 192(%rdx), %ymm12, %ymm12
16617 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
16618 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm18, %zmm30
16619 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16620 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
16621 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm18 {%k1}
16622 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
16623 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm18, %zmm1
16624 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16625 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16626 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm5 {%k1}
16627 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rsi), %xmm12
16628 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 256(%rcx), %ymm12, %ymm13
16629 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdi), %xmm12
16630 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
16631 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
16632 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm5, %zmm12
16633 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
16634 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16635 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
16636 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
16637 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm5, %zmm23
16638 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16639 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm29 {%k1}
16640 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rsi), %xmm13
16641 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, 320(%rcx), %ymm13, %ymm13
16642 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdi), %xmm18
16643 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
16644 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
16645 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm29, %zmm22
16646 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16647 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
16648 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
16649 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
16650 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm6, %zmm13
16651 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16652 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm21 {%k1}
16653 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rsi), %xmm18
16654 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
16655 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdi), %xmm25
16656 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
16657 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
16658 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm21, %zmm16
16659 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16660 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k1}
16661 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
16662 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm28, %zmm21
16663 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16664 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm15 {%k1}
16665 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rsi), %xmm18
16666 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
16667 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdi), %xmm25
16668 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
16669 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
16670 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm27, %zmm15, %zmm6
16671 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16672 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k1}
16673 ; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
16674 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm18, %zmm14, %zmm5
16675 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
16676 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 3776(%rax)
16677 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, 3712(%rax)
16678 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, 3264(%rax)
16679 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, 3200(%rax)
16680 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, 2752(%rax)
16681 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, 2688(%rax)
16682 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 2240(%rax)
16683 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, 2176(%rax)
16684 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
16685 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16686 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1664(%rax)
16687 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16688 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1216(%rax)
16689 ; AVX512DQBW-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
16690 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1152(%rax)
16691 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16692 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 704(%rax)
16693 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16694 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 640(%rax)
16695 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16696 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 192(%rax)
16697 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16698 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 128(%rax)
16699 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16700 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 4032(%rax)
16701 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16702 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3968(%rax)
16703 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16704 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3904(%rax)
16705 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16706 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3840(%rax)
16707 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 3648(%rax)
16708 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 3584(%rax)
16709 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16710 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3520(%rax)
16711 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16712 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
16713 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16714 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
16715 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16716 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
16717 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, 3136(%rax)
16718 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, 3072(%rax)
16719 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16720 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3008(%rax)
16721 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16722 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2944(%rax)
16723 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16724 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2880(%rax)
16725 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16726 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2816(%rax)
16727 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 2624(%rax)
16728 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, 2560(%rax)
16729 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16730 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2496(%rax)
16731 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16732 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2432(%rax)
16733 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16734 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2368(%rax)
16735 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16736 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2304(%rax)
16737 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, 2112(%rax)
16738 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, 2048(%rax)
16739 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16740 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1984(%rax)
16741 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16742 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1920(%rax)
16743 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16744 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1856(%rax)
16745 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16746 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
16747 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 1600(%rax)
16748 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, 1536(%rax)
16749 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16750 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1472(%rax)
16751 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16752 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
16753 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16754 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
16755 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16756 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
16757 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
16758 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 1024(%rax)
16759 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16760 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 960(%rax)
16761 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16762 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 896(%rax)
16763 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16764 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 832(%rax)
16765 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16766 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 768(%rax)
16767 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, 576(%rax)
16768 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, 512(%rax)
16769 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16770 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 448(%rax)
16771 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16772 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 384(%rax)
16773 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16774 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 320(%rax)
16775 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16776 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 256(%rax)
16777 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, 64(%rax)
16778 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
16779 ; AVX512DQBW-FAST-NEXT: addq $5512, %rsp # imm = 0x1588
16780 ; AVX512DQBW-FAST-NEXT: vzeroupper
16781 ; AVX512DQBW-FAST-NEXT: retq
  %in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <64 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <64 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <64 x i64>, ptr %in.vecptr5, align 64
  %in.vec6 = load <64 x i64>, ptr %in.vecptr6, align 64
  %in.vec7 = load <64 x i64>, ptr %in.vecptr7, align 64
  %1 = shufflevector <64 x i64> %in.vec0, <64 x i64> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %2 = shufflevector <64 x i64> %in.vec2, <64 x i64> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %3 = shufflevector <64 x i64> %in.vec4, <64 x i64> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %4 = shufflevector <64 x i64> %in.vec6, <64 x i64> %in.vec7, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %5 = shufflevector <128 x i64> %1, <128 x i64> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
  %6 = shufflevector <128 x i64> %3, <128 x i64> %4, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
  %7 = shufflevector <256 x i64> %5, <256 x i64> %6, <512 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447, i32 448, i32 449, i32 450, i32 451, i32 452, i32 453, i32 454, i32 455, i32 456, i32 457, i32 458, i32 459, i32 460, i32 461, i32 462, i32 463, i32 464, i32 465, i32 466, i32 467, i32 468, i32 469, i32 470, i32 471, i32 472, i32 473, i32 474, i32 475, i32 476, i32 477, i32 478, i32 479, i32 480, i32 481, i32 482, i32 483, i32 484, i32 485, i32 486, i32 487, i32 488, i32 489, i32 490, i32 491, i32 492, i32 493, i32 494, i32 495, i32 496, i32 497, i32 498, i32 499, i32 500, i32 501, i32 502, i32 503, i32 504, i32 505, i32 506, i32 507, i32 508, i32 509, i32 510, i32 511>
  %interleaved.vec = shufflevector <512 x i64> %7, <512 x i64> poison, <512 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 448, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 449, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 450, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 451, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 452, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 453, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 454, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 455, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 456, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 457, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 458, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 459, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 460, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 461, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 462, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 463, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 464, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 465, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 466, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 467, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 468, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 469, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 470, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 471, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 472, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 473, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 474, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 475, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 476, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 477, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 478, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 479, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 480, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 481, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 482, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 483, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 484, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 485, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 486, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 487, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 488, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 489, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 490, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 491, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 492, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 493, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 494, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 495, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 496, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 497, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 498, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 499, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 500, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 501, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 502, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 503, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 504, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 505, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 506, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 507, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 508, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 509, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 510, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447, i32 511>
  store <512 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX2-FAST: {{.*}}
; AVX2-FAST-PERLANE: {{.*}}
; AVX2-SLOW: {{.*}}
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-FAST: {{.*}}
; AVX512BW-SLOW: {{.*}}
; AVX512F-FAST: {{.*}}
; AVX512F-SLOW: {{.*}}
; FALLBACK0: {{.*}}
; FALLBACK1: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}
; FALLBACK2: {{.*}}
; FALLBACK3: {{.*}}
; FALLBACK4: {{.*}}
; FALLBACK5: {{.*}}
; FALLBACK6: {{.*}}
; FALLBACK7: {{.*}}
; FALLBACK8: {{.*}}
; FALLBACK9: {{.*}}