; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
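;
; As an illustrative sketch only (not one of the autogenerated checks), the
; scalar source that the loop vectorizer turns into this pattern looks
; roughly like the following C loop, with hypothetical array names:
;   for (int i = 0; i < n; ++i) {
;     out[6*i+0] = a[i]; out[6*i+1] = b[i]; out[6*i+2] = c[i];
;     out[6*i+3] = d[i]; out[6*i+4] = e[i]; out[6*i+5] = f[i];
;   }
; Each @store_i64_stride6_vfN function below concatenates six <N x i64>
; inputs and shuffles them into a single stride-6 interleaved <6*N x i64>
; store.
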
define void @store_i64_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps (%rdi), %xmm0
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps (%rdx), %xmm2
; SSE-NEXT: movaps (%rcx), %xmm3
; SSE-NEXT: movaps (%r8), %xmm4
; SSE-NEXT: movaps (%r9), %xmm5
; SSE-NEXT: movaps %xmm0, %xmm6
; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, %xmm7
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
; SSE-NEXT: movaps %xmm4, %xmm8
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm5[1]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: movaps %xmm2, 16(%rax)
; SSE-NEXT: movaps %xmm4, 32(%rax)
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps %xmm8, 80(%rax)
; SSE-NEXT: movaps %xmm7, 64(%rax)
; SSE-NEXT: movaps %xmm6, (%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride6_vf2:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm0
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm3
; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm4
; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm5
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm6
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm7
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[2],ymm6[2]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm1
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[3],ymm1[3]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm1
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
; AVX1-ONLY-NEXT: vmovaps %ymm1, 64(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm0, 32(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm6, (%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride6_vf2:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm1
; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm2
; AVX2-ONLY-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
; AVX2-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,1,3]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,1,3]
; AVX2-ONLY-NEXT: vmovaps %ymm1, 64(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm3, (%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf2:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-NEXT: vmovdqa (%r8), %xmm2
; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512-NEXT: vmovdqa {{.*#+}} ymm2 = [5,7,9,11]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,2,4,6,8,10,1,3]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
  %in.vec0 = load <2 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <2 x i64> %in.vec0, <2 x i64> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i64> %in.vec2, <2 x i64> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i64> %in.vec4, <2 x i64> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i64> %1, <4 x i64> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <4 x i64> %3, <4 x i64> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i64> %4, <8 x i64> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %interleaved.vec = shufflevector <12 x i64> %6, <12 x i64> poison, <12 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11>
  store <12 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

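; vf4: six <4 x i64> inputs interleaved into a 192-byte stride-6 store.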
define void @store_i64_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps (%rdi), %xmm2
; SSE-NEXT: movaps 16(%rdi), %xmm1
; SSE-NEXT: movaps (%rsi), %xmm5
; SSE-NEXT: movaps 16(%rsi), %xmm6
; SSE-NEXT: movaps (%rdx), %xmm0
; SSE-NEXT: movaps 16(%rdx), %xmm4
; SSE-NEXT: movaps (%rcx), %xmm7
; SSE-NEXT: movaps 16(%rcx), %xmm8
; SSE-NEXT: movaps (%r8), %xmm9
; SSE-NEXT: movaps 16(%r8), %xmm10
; SSE-NEXT: movaps (%r9), %xmm11
; SSE-NEXT: movaps 16(%r9), %xmm12
; SSE-NEXT: movaps %xmm1, %xmm3
; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm6[1]
; SSE-NEXT: movaps %xmm10, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm12[0]
; SSE-NEXT: movaps %xmm9, %xmm14
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm11[1]
; SSE-NEXT: movaps %xmm0, %xmm15
; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm7[1]
; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
; SSE-NEXT: movaps %xmm2, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm5[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
; SSE-NEXT: movaps %xmm4, %xmm12
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm8[1]
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm8[0]
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm6[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; SSE-NEXT: movaps %xmm9, 32(%rax)
; SSE-NEXT: movaps %xmm2, 48(%rax)
; SSE-NEXT: movaps %xmm1, 96(%rax)
; SSE-NEXT: movaps %xmm4, 112(%rax)
; SSE-NEXT: movaps %xmm12, 160(%rax)
; SSE-NEXT: movaps %xmm10, 176(%rax)
; SSE-NEXT: movaps %xmm7, (%rax)
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps %xmm15, 64(%rax)
; SSE-NEXT: movaps %xmm14, 80(%rax)
; SSE-NEXT: movaps %xmm13, 128(%rax)
; SSE-NEXT: movaps %xmm3, 144(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride6_vf4:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovapd (%rdi), %ymm0
; AVX1-ONLY-NEXT: vmovapd (%rsi), %ymm1
; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm2
; AVX1-ONLY-NEXT: vmovapd (%r8), %ymm3
; AVX1-ONLY-NEXT: vmovapd (%r9), %ymm4
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm6
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm7[1],xmm6[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm8 = ymm3[0,1],ymm8[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm5 = ymm8[0],ymm5[1],ymm8[2,3]
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm8
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm9
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm9[1],xmm8[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm10, %ymm11
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm12[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5],ymm11[6,7]
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],mem[0],ymm2[2],mem[2]
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm11
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm11 = xmm11[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm4[2,3],ymm1[2,3]
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm3[2,3],ymm0[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[3]
; AVX1-ONLY-NEXT: vmovapd 16(%rdx), %xmm1
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1,2],ymm4[3]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm7[0],xmm6[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm9[0],xmm8[0]
; AVX1-ONLY-NEXT: vmovaps %xmm4, 16(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm3, (%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm0, 128(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm2, 96(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm10, 64(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm5, 32(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm1, 160(%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride6_vf4:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm2
; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm3
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm4
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm5
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = xmm5[0,0]
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm7
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm8
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm7[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm4[0,1],ymm9[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm9
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm10
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm11[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm8[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm4[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm7, (%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm2, 160(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm5, 64(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm6, 32(%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512-LABEL: store_i64_stride6_vf4:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %ymm0
; AVX512-NEXT: vmovdqa (%rdx), %ymm1
; AVX512-NEXT: vmovdqa (%r8), %ymm2
; AVX512-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
; AVX512-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
; AVX512-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,4,8,12,u,u,1,5>
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,2,3,8,12,6,7]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm4
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm3 = <1,5,u,u,10,14,2,6>
; AVX512-NEXT: vpermi2q %zmm0, %zmm1, %zmm3
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,9,13,4,5,6,7]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
; AVX512-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [11,15,3,7,11,15,3,7]
; AVX512-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm0 = [10,14,2,3,4,5,11,15]
; AVX512-NEXT: vpermi2q %zmm2, %zmm3, %zmm0
; AVX512-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm5, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
  %in.vec0 = load <4 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <4 x i64> %in.vec0, <4 x i64> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i64> %in.vec2, <4 x i64> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i64> %in.vec4, <4 x i64> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i64> %1, <8 x i64> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <8 x i64> %3, <8 x i64> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <16 x i64> %4, <16 x i64> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
  %interleaved.vec = shufflevector <24 x i64> %6, <24 x i64> poison, <24 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23>
  store <24 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

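; vf8: six <8 x i64> inputs interleaved into a 384-byte stride-6 store.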
define void @store_i64_stride6_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf8:
; SSE: # %bb.0:
; SSE-NEXT: subq $24, %rsp
; SSE-NEXT: movaps (%rdi), %xmm2
; SSE-NEXT: movaps 16(%rdi), %xmm3
; SSE-NEXT: movaps 32(%rdi), %xmm5
; SSE-NEXT: movaps (%rsi), %xmm1
; SSE-NEXT: movaps 16(%rsi), %xmm12
; SSE-NEXT: movaps 32(%rsi), %xmm14
; SSE-NEXT: movaps (%rdx), %xmm4
; SSE-NEXT: movaps 16(%rdx), %xmm6
; SSE-NEXT: movaps 32(%rdx), %xmm0
; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps (%rcx), %xmm10
; SSE-NEXT: movaps 16(%rcx), %xmm13
; SSE-NEXT: movaps (%r8), %xmm7
; SSE-NEXT: movaps 16(%r8), %xmm9
; SSE-NEXT: movaps (%r9), %xmm11
; SSE-NEXT: movaps 16(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm1[0]
; SSE-NEXT: movaps %xmm8, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm4, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm10[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm10[1]
; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm7, %xmm10
; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm11[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm11[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm3, %xmm11
; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm12[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm12[1]
; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm6, %xmm12
; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm13[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm13[1]
; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm13
; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm5, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm14[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm14[1]
; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: movaps %xmm1, %xmm14
; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm5
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 48(%rdi), %xmm6
; SSE-NEXT: movaps 48(%rsi), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 48(%rdx), %xmm1
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
; SSE-NEXT: movaps 48(%r8), %xmm0
; SSE-NEXT: movaps 48(%r9), %xmm3
; SSE-NEXT: movaps %xmm0, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm0, 368(%rax)
; SSE-NEXT: movaps %xmm1, 352(%rax)
; SSE-NEXT: movaps %xmm6, 336(%rax)
; SSE-NEXT: movaps %xmm2, 320(%rax)
; SSE-NEXT: movaps %xmm4, 304(%rax)
; SSE-NEXT: movaps %xmm7, 288(%rax)
; SSE-NEXT: movaps %xmm5, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps %xmm8, 224(%rax)
; SSE-NEXT: movaps %xmm14, 208(%rax)
; SSE-NEXT: movaps %xmm15, 192(%rax)
; SSE-NEXT: movaps %xmm9, 176(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps %xmm13, 128(%rax)
; SSE-NEXT: movaps %xmm12, 112(%rax)
; SSE-NEXT: movaps %xmm11, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps %xmm10, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $24, %rsp
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride6_vf8:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: vmovapd 32(%rdi), %ymm13
; AVX1-ONLY-NEXT: vmovapd (%r8), %ymm11
; AVX1-ONLY-NEXT: vmovapd 32(%r8), %ymm14
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm1[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm5
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm6
; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm8
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm6[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r9), %ymm2, %ymm7
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm9
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm7[2,3],ymm2[4,5],ymm7[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm9
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm9[1],xmm5[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm7, %ymm10
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm12
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm12[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm10[2,3],ymm7[4,5],ymm10[6,7]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm12
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm12[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm15 = ymm14[0,1],ymm15[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm10 = ymm15[0],ymm10[1],ymm15[2,3]
; AVX1-ONLY-NEXT: vmovapd 32(%rsi), %ymm15
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm13 = ymm13[1],ymm15[1],ymm13[3],ymm15[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm14[2,3],ymm13[2,3]
; AVX1-ONLY-NEXT: vmovapd 32(%r9), %ymm0
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm15[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[2],ymm14[3]
; AVX1-ONLY-NEXT: vmovapd (%rdi), %ymm14
; AVX1-ONLY-NEXT: vmovapd (%rsi), %ymm15
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm14[1],ymm15[1],ymm14[3],ymm15[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm14[2,3]
; AVX1-ONLY-NEXT: vmovapd (%r9), %ymm1
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm1[2,3],ymm15[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm14 = ymm11[0],ymm14[0],ymm11[2],ymm14[3]
; AVX1-ONLY-NEXT: vmovapd 48(%rdx), %xmm11
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm15
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm11 = ymm11[0,1],ymm15[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm11 = ymm11[0,1,2],ymm0[3]
; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm15
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm15[6,7]
; AVX1-ONLY-NEXT: vmovapd 16(%rdx), %xmm15
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm15[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm2
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm15[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3]
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm2
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm15
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm15[6,7]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm12[0],xmm4[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm8[0],xmm6[0]
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm3 = xmm3[0],mem[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm9[0],xmm5[0]
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps %xmm5, 16(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm3, (%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm6, 208(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm4, 192(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm14, 128(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm13, 320(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm2, 96(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm1, 160(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm10, 224(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm7, 64(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm11, 352(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride6_vf8:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm1
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm3
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm5
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = xmm3[0,0]
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm9
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm7
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm6
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm9[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm1[0,1],ymm2[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm2
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm11
; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm12
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm13
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm10
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm10[1],xmm12[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5],ymm2[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm13[1],xmm11[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm14[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm14
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm3[6,7]
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = xmm5[0,0]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm8 = xmm14[1],xmm7[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm4[0,1],ymm8[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3],ymm8[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm8
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm11
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm13
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[2],ymm9[2]
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm9
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm12
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm14, %ymm10
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm10[0],ymm7[0],ymm10[2],ymm7[2]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm10[2,3],ymm14[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm11[1],ymm9[1],ymm11[3],ymm9[3]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm14[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%r9), %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3],ymm4[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm14
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm11[0],ymm9[0],ymm11[2],ymm9[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm12[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm11
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm11[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm14[1],mem[1],ymm14[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm8[1],ymm13[1],ymm8[3],ymm13[3]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm12[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%r9), %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm12[2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm8[0],ymm13[0],ymm8[2],ymm13[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm14[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm12[6,7]
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovaps %ymm8, 96(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm7, 192(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm11, 160(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm9, 288(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm4, 320(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm10, 352(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm6, (%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm5, 224(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm3, 64(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm2, 256(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-LABEL: store_i64_stride6_vf8:
; AVX512F: # %bb.0:
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm4
; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm3
; AVX512F-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512F-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm6, %zmm4, %zmm0
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [4,12,4,12]
; AVX512F-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512F-NEXT: vpermi2q %zmm3, %zmm2, %zmm5
; AVX512F-NEXT: movb $12, %r10b
; AVX512F-NEXT: kmovw %r10d, %k1
; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512F-NEXT: movb $16, %r10b
; AVX512F-NEXT: kmovw %r10d, %k2
; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512F-NEXT: vmovdqa64 (%r9), %zmm5
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512F-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512F-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512F-NEXT: movb $48, %r9b
; AVX512F-NEXT: kmovw %r9d, %k2
; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,9,u,4,5,6,7>
; AVX512F-NEXT: vpermi2q %zmm1, %zmm8, %zmm7
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,1,2,9,4,5,6,7]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm7, %zmm8
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [6,14,6,14,6,14,6,14]
; AVX512F-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512F-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm3, %zmm2, %zmm9
; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm9 {%k2}
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,13,u,4,5,6,7>
; AVX512F-NEXT: vpermi2q %zmm1, %zmm9, %zmm7
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm7, %zmm9
; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,8,1,9,0,8,1,9]
; AVX512F-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512F-NEXT: vmovdqa (%rdx), %xmm10
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm7 {%k1}
; AVX512F-NEXT: vinserti32x4 $2, (%r8), %zmm7, %zmm7
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,3,4,8,6,7]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm7, %zmm10
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [7,15,7,15,7,15,7,15]
; AVX512F-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512F-NEXT: vbroadcasti128 {{.*#+}} ymm11 = [7,15,7,15]
; AVX512F-NEXT: # ymm11 = mem[0,1,0,1]
; AVX512F-NEXT: vpermi2q %zmm6, %zmm4, %zmm11
; AVX512F-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm11[0,1,2,3],zmm7[4,5,6,7]
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <14,u,2,3,4,5,15,u>
; AVX512F-NEXT: vpermi2q %zmm1, %zmm4, %zmm6
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,14,2,3,4,5,6,15]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm6, %zmm4
; AVX512F-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [3,11,3,11,3,11,3,11]
; AVX512F-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512F-NEXT: vmovdqa (%rdi), %ymm2
; AVX512F-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512F-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm2
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm3 = <10,u,2,3,4,5,11,u>
; AVX512F-NEXT: vpermi2q %zmm1, %zmm2, %zmm3
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm3, %zmm1
; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512F-NEXT: vpermi2q %zmm5, %zmm0, %zmm2
; AVX512F-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm4, 320(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm8, 64(%rax)
; AVX512F-NEXT: vmovdqa64 %zmm10, (%rax)
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i64_stride6_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm4
; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm3
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm4, %zmm0
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [4,12,4,12]
; AVX512BW-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm5
; AVX512BW-NEXT: movb $12, %r10b
; AVX512BW-NEXT: kmovd %r10d, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0 {%k1}
; AVX512BW-NEXT: movb $16, %r10b
; AVX512BW-NEXT: kmovd %r10d, %k2
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k2}
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm5
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [2,10,2,10,2,10,2,10]
; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,9,2,10,1,9,2,10]
; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm8
; AVX512BW-NEXT: movb $48, %r9b
; AVX512BW-NEXT: kmovd %r9d, %k2
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,9,u,4,5,6,7>
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm8, %zmm7
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,1,2,9,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm7, %zmm8
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [6,14,6,14,6,14,6,14]
; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,13,6,14,5,13,6,14]
; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm9
; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm9 {%k2}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,13,u,4,5,6,7>
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm9, %zmm7
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,1,2,13,4,5,6,7]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm7, %zmm9
; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,8,1,9,0,8,1,9]
; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm4, %zmm7
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm10
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm7 {%k1}
; AVX512BW-NEXT: vinserti32x4 $2, (%r8), %zmm7, %zmm7
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,3,4,8,6,7]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm7, %zmm10
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [7,15,7,15,7,15,7,15]
; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm7
; AVX512BW-NEXT: vbroadcasti128 {{.*#+}} ymm11 = [7,15,7,15]
; AVX512BW-NEXT: # ymm11 = mem[0,1,0,1]
; AVX512BW-NEXT: vpermi2q %zmm6, %zmm4, %zmm11
; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm11[0,1,2,3],zmm7[4,5,6,7]
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <14,u,2,3,4,5,15,u>
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm4, %zmm6
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,14,2,3,4,5,6,15]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm6, %zmm4
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [3,11,3,11,3,11,3,11]
; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
; AVX512BW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512BW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-NEXT: vinserti64x4 $0, %ymm2, %zmm6, %zmm2
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <10,u,2,3,4,5,11,u>
; AVX512BW-NEXT: vpermi2q %zmm1, %zmm2, %zmm3
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,10,2,3,4,5,6,11]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm3, %zmm1
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,2,3,4,12,6,7]
; AVX512BW-NEXT: vpermi2q %zmm5, %zmm0, %zmm2
; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm1, 128(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm4, 320(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm9, 256(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm8, 64(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm10, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <8 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <8 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <8 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <8 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <8 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <8 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <8 x i64> %in.vec0, <8 x i64> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %2 = shufflevector <8 x i64> %in.vec2, <8 x i64> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %3 = shufflevector <8 x i64> %in.vec4, <8 x i64> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %4 = shufflevector <16 x i64> %1, <16 x i64> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %5 = shufflevector <16 x i64> %3, <16 x i64> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <32 x i64> %4, <32 x i64> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
  %interleaved.vec = shufflevector <48 x i64> %6, <48 x i64> poison, <48 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47>
  store <48 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

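; vf16: six <16 x i64> inputs interleaved into a 768-byte stride-6 store.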
789 define void @store_i64_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
790 ; SSE-LABEL: store_i64_stride6_vf16:
792 ; SSE-NEXT: subq $408, %rsp # imm = 0x198
793 ; SSE-NEXT: movaps (%rdi), %xmm7
794 ; SSE-NEXT: movaps 16(%rdi), %xmm8
795 ; SSE-NEXT: movaps 32(%rdi), %xmm9
796 ; SSE-NEXT: movaps (%rsi), %xmm3
797 ; SSE-NEXT: movaps 16(%rsi), %xmm1
798 ; SSE-NEXT: movaps 32(%rsi), %xmm0
799 ; SSE-NEXT: movaps (%rdx), %xmm10
800 ; SSE-NEXT: movaps 16(%rdx), %xmm11
801 ; SSE-NEXT: movaps 32(%rdx), %xmm12
802 ; SSE-NEXT: movaps (%rcx), %xmm5
803 ; SSE-NEXT: movaps 16(%rcx), %xmm2
804 ; SSE-NEXT: movaps (%r8), %xmm13
805 ; SSE-NEXT: movaps 16(%r8), %xmm15
806 ; SSE-NEXT: movaps (%r9), %xmm6
807 ; SSE-NEXT: movaps 16(%r9), %xmm4
808 ; SSE-NEXT: movaps %xmm7, %xmm14
809 ; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm3[0]
810 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
811 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
812 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
813 ; SSE-NEXT: movaps %xmm10, %xmm3
814 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm5[0]
815 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
816 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm5[1]
817 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
818 ; SSE-NEXT: movaps %xmm13, %xmm5
819 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm6[0]
820 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
821 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
822 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
823 ; SSE-NEXT: movaps %xmm8, %xmm3
824 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm1[0]
825 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
826 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
827 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
828 ; SSE-NEXT: movaps %xmm11, %xmm1
829 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
830 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
831 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm2[1]
832 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
833 ; SSE-NEXT: movaps %xmm15, %xmm1
834 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
835 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
836 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm4[1]
837 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
838 ; SSE-NEXT: movaps %xmm9, %xmm1
839 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
840 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
841 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
842 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
843 ; SSE-NEXT: movaps 32(%rcx), %xmm0
844 ; SSE-NEXT: movaps %xmm12, %xmm1
845 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
846 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
847 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
848 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
849 ; SSE-NEXT: movaps 32(%r8), %xmm2
850 ; SSE-NEXT: movaps 32(%r9), %xmm0
851 ; SSE-NEXT: movaps %xmm2, %xmm1
852 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
853 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
854 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
855 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
856 ; SSE-NEXT: movaps 48(%rdi), %xmm2
857 ; SSE-NEXT: movaps 48(%rsi), %xmm0
858 ; SSE-NEXT: movaps %xmm2, %xmm1
859 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
860 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
861 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
862 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
863 ; SSE-NEXT: movaps 48(%rdx), %xmm2
864 ; SSE-NEXT: movaps 48(%rcx), %xmm0
865 ; SSE-NEXT: movaps %xmm2, %xmm1
866 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
867 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
868 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
869 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
870 ; SSE-NEXT: movaps 48(%r8), %xmm2
871 ; SSE-NEXT: movaps 48(%r9), %xmm0
872 ; SSE-NEXT: movaps %xmm2, %xmm1
873 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
874 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
875 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
876 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
877 ; SSE-NEXT: movaps 64(%rdi), %xmm2
878 ; SSE-NEXT: movaps 64(%rsi), %xmm0
879 ; SSE-NEXT: movaps %xmm2, %xmm1
880 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
881 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
882 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
883 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
884 ; SSE-NEXT: movaps 64(%rdx), %xmm2
885 ; SSE-NEXT: movaps 64(%rcx), %xmm0
886 ; SSE-NEXT: movaps %xmm2, %xmm1
887 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
888 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
889 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
890 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
891 ; SSE-NEXT: movaps 64(%r8), %xmm2
892 ; SSE-NEXT: movaps 64(%r9), %xmm0
893 ; SSE-NEXT: movaps %xmm2, %xmm1
894 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
895 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
896 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
897 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
898 ; SSE-NEXT: movaps 80(%rdi), %xmm15
899 ; SSE-NEXT: movaps 80(%rsi), %xmm0
900 ; SSE-NEXT: movaps %xmm15, %xmm1
901 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
902 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
903 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
904 ; SSE-NEXT: movaps 80(%rdx), %xmm12
905 ; SSE-NEXT: movaps 80(%rcx), %xmm0
906 ; SSE-NEXT: movaps %xmm12, %xmm1
907 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
908 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
909 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
910 ; SSE-NEXT: movaps 80(%r8), %xmm14
911 ; SSE-NEXT: movaps 80(%r9), %xmm0
912 ; SSE-NEXT: movaps %xmm14, %xmm1
913 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
914 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
915 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
916 ; SSE-NEXT: movaps 96(%rdi), %xmm9
917 ; SSE-NEXT: movaps 96(%rsi), %xmm0
918 ; SSE-NEXT: movaps %xmm9, %xmm13
919 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
920 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
921 ; SSE-NEXT: movaps 96(%rdx), %xmm10
922 ; SSE-NEXT: movaps 96(%rcx), %xmm0
923 ; SSE-NEXT: movaps %xmm10, %xmm11
924 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
925 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm0[1]
926 ; SSE-NEXT: movaps 96(%r8), %xmm5
927 ; SSE-NEXT: movaps 96(%r9), %xmm0
928 ; SSE-NEXT: movaps %xmm5, %xmm8
929 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
930 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
931 ; SSE-NEXT: movaps 112(%rdi), %xmm6
932 ; SSE-NEXT: movaps 112(%rsi), %xmm1
933 ; SSE-NEXT: movaps %xmm6, %xmm7
934 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
935 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
936 ; SSE-NEXT: movaps 112(%rdx), %xmm1
937 ; SSE-NEXT: movaps 112(%rcx), %xmm0
938 ; SSE-NEXT: movaps %xmm1, %xmm4
939 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
940 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
941 ; SSE-NEXT: movaps 112(%r8), %xmm0
942 ; SSE-NEXT: movaps 112(%r9), %xmm3
943 ; SSE-NEXT: movaps %xmm0, %xmm2
944 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
945 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
946 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm0, 752(%rax)
; SSE-NEXT: movaps %xmm1, 736(%rax)
; SSE-NEXT: movaps %xmm6, 720(%rax)
; SSE-NEXT: movaps %xmm2, 704(%rax)
; SSE-NEXT: movaps %xmm4, 688(%rax)
; SSE-NEXT: movaps %xmm7, 672(%rax)
; SSE-NEXT: movaps %xmm5, 656(%rax)
; SSE-NEXT: movaps %xmm10, 640(%rax)
; SSE-NEXT: movaps %xmm9, 624(%rax)
; SSE-NEXT: movaps %xmm8, 608(%rax)
; SSE-NEXT: movaps %xmm11, 592(%rax)
; SSE-NEXT: movaps %xmm13, 576(%rax)
; SSE-NEXT: movaps %xmm14, 560(%rax)
; SSE-NEXT: movaps %xmm12, 544(%rax)
; SSE-NEXT: movaps %xmm15, 528(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 512(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 496(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 480(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 464(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 448(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 432(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 416(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 400(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 384(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 368(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 352(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 336(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 320(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 304(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 288(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 224(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 208(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 192(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 176(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 128(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 112(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $408, %rsp # imm = 0x198
; SSE-NEXT: retq

; AVX1-ONLY-LABEL: store_i64_stride6_vf16:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: subq $440, %rsp # imm = 0x1B8
; AVX1-ONLY-NEXT: vmovapd (%r8), %ymm11
; AVX1-ONLY-NEXT: vmovapd 32(%r8), %ymm0
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm3
; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm5
; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm1, %ymm2
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm15
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm15[1],xmm5[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vmovapd 64(%r8), %ymm5
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm10
; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm9
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm9[1],xmm10[1]
; AVX1-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm8
; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm7[1],xmm8[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vmovapd 96(%r8), %ymm1
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm1[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm6
; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm3
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm6[1]
; AVX1-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r9), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd (%rdi), %ymm2
; AVX1-ONLY-NEXT: vmovapd (%rsi), %ymm12
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm11[2,3],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovapd (%r9), %ymm4
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm4[2,3],ymm12[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm11[0],ymm2[2],ymm11[3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 32(%rdi), %ymm2
; AVX1-ONLY-NEXT: vmovapd 32(%rsi), %ymm11
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm11[1],ymm2[3],ymm11[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovapd 32(%r9), %ymm2
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm2[2,3],ymm11[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm11[0],ymm0[2],ymm11[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 64(%rdi), %ymm0
; AVX1-ONLY-NEXT: vmovapd 64(%rsi), %ymm11
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm11[1],ymm0[3],ymm11[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm0[2,3]
; AVX1-ONLY-NEXT: vmovapd 64(%r9), %ymm0
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm0[2,3],ymm11[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[0],ymm11[0],ymm5[2],ymm11[3]
; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 96(%rdi), %ymm5
; AVX1-ONLY-NEXT: vmovapd 96(%rsi), %ymm11
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm5[1],ymm11[1],ymm5[3],ymm11[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm1[2,3],ymm5[2,3]
; AVX1-ONLY-NEXT: vmovapd 96(%r9), %ymm1
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm1[2,3],ymm11[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm11 = ymm5[0],ymm11[0],ymm5[2],ymm11[3]
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm5
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm5[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm12
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm12[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 16(%rdx), %xmm5
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm5[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm12
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm5 = ymm5[0,1],ymm12[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3]
; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm4
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm4 = xmm4[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovapd 48(%rdx), %xmm4
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm14
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0,1],ymm14[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0,1,2],ymm2[3]
; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm2
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm14
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm14[6,7]
; AVX1-ONLY-NEXT: vmovapd 80(%rdx), %xmm14
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm14[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm13
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm13[0,1,2],ymm0[3]
; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm13
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm13 = xmm13[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm14
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
; AVX1-ONLY-NEXT: vmovapd 112(%rdx), %xmm14
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm14[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm12 = ymm14[0,1],ymm12[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm12[0,1,2],ymm1[3]
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm12 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm12 = xmm15[0],mem[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm9 = xmm9[0],xmm10[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10, %xmm10 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm10 = xmm10[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm14 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm14 = xmm14[0],mem[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm7[0],xmm8[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm6 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm6 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm6 = xmm6[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm8 = xmm8[0],mem[0]
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps %xmm8, 16(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm6, (%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm3, 592(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm7, 576(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm14, 208(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm10, 192(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm9, 400(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm12, 384(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm11, 704(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 512(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 320(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm1, 736(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm13, 672(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm1, 640(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm1, 608(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm0, 544(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm2, 480(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
; AVX1-ONLY-NEXT: vmovapd %ymm4, 352(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm5, 288(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX1-ONLY-NEXT: addq $440, %rsp # imm = 0x1B8
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq

; AVX2-ONLY-LABEL: store_i64_stride6_vf16:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: subq $360, %rsp # imm = 0x168
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm5
; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm3
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm15
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm15[1],xmm3[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm9
; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm13
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm8
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm11
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm8[1],xmm9[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm11[1],xmm13[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm6
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm7
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm4
; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %xmm5
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm12
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm12[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm3
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm3[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[0,1],ymm10[0,1]
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm12 = xmm0[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm10[0,1],ymm12[2,3],ymm10[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm10
; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm12
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm12[1],xmm10[1]
; AVX2-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm15, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm1, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm2, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm3, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm2
; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm3
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%r9), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm3
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm2[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm3
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm5
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm6
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm3[0],ymm5[0],ymm3[2],ymm5[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm6[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm7[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm5[1],ymm3[3],ymm5[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm3 = mem[2,3],ymm3[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%r9), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm6[1],mem[1],ymm6[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm6
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm5[2,3],ymm6[2,3]
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm7
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm9
; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm10
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm7[0],ymm9[0],ymm7[2],ymm9[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3],ymm10[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm11
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm11[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm7[1],ymm9[1],ymm7[3],ymm9[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, (%rsp), %ymm7, %ymm7 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm7 = mem[2,3],ymm7[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%r9), %ymm9
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,3],ymm7[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm10[1],mem[1],ymm10[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm10
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm10[2,3]
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm10
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm11
; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm12
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm10[0],ymm11[0],ymm10[2],ymm11[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm8[2,3],ymm12[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm13
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm13[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm10[1],ymm11[1],ymm10[3],ymm11[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm10 = mem[2,3],ymm10[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%r9), %ymm11
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm11[2,3],ymm10[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm11 = ymm12[1],mem[1],ymm12[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm12
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm11 = ymm11[2,3],ymm12[2,3]
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovaps %ymm11, 736(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm10, 704(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm8, 672(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm9, 544(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm7, 512(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm5, 480(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm14, 384(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm6, 352(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm3, 320(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm2, 288(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm4, 160(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm15, 96(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-ONLY-NEXT: addq $360, %rsp # imm = 0x168
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq

; AVX512F-ONLY-SLOW-LABEL: store_i64_stride6_vf16:
; AVX512F-ONLY-SLOW: # %bb.0:
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512F-ONLY-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512F-ONLY-SLOW-NEXT: movb $12, %r10b
; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movb $16, %r10b
; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512F-ONLY-SLOW-NEXT: movb $48, %r9b
; AVX512F-ONLY-SLOW-NEXT: kmovw %r9d, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512F-ONLY-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # ymm22 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512F-ONLY-SLOW-NEXT: vzeroupper
; AVX512F-ONLY-SLOW-NEXT: retq

; AVX512F-ONLY-FAST-LABEL: store_i64_stride6_vf16:
; AVX512F-ONLY-FAST: # %bb.0:
; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512F-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512F-ONLY-FAST-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512F-ONLY-FAST-NEXT: movb $12, %r10b
; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512F-ONLY-FAST-NEXT: movb $16, %r10b
; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512F-ONLY-FAST-NEXT: movb $48, %r9b
; AVX512F-ONLY-FAST-NEXT: kmovw %r9d, %k2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512F-ONLY-FAST-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512F-ONLY-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # ymm22 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512F-ONLY-FAST-NEXT: vzeroupper
; AVX512F-ONLY-FAST-NEXT: retq

; AVX512DQ-SLOW-LABEL: store_i64_stride6_vf16:
; AVX512DQ-SLOW: # %bb.0:
; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512DQ-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512DQ-SLOW-NEXT: movb $12, %r10b
; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k1
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512DQ-SLOW-NEXT: movb $16, %r10b
; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512DQ-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512DQ-SLOW-NEXT: movb $48, %r9b
; AVX512DQ-SLOW-NEXT: kmovw %r9d, %k2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512DQ-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512DQ-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512DQ-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512DQ-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512DQ-SLOW-NEXT: # ymm22 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512DQ-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm11
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512DQ-SLOW-NEXT: vzeroupper
; AVX512DQ-SLOW-NEXT: retq

; AVX512DQ-FAST-LABEL: store_i64_stride6_vf16:
; AVX512DQ-FAST: # %bb.0:
; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512DQ-FAST-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512DQ-FAST-NEXT: movb $12, %r10b
; AVX512DQ-FAST-NEXT: kmovw %r10d, %k1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512DQ-FAST-NEXT: movb $16, %r10b
; AVX512DQ-FAST-NEXT: kmovw %r10d, %k2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQ-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512DQ-FAST-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512DQ-FAST-NEXT: movb $48, %r9b
; AVX512DQ-FAST-NEXT: kmovw %r9d, %k2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512DQ-FAST-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512DQ-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512DQ-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512DQ-FAST-NEXT: # ymm22 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm11
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
1994 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
1995 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, 448(%rax)
1996 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 512(%rax)
1997 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 576(%rax)
1998 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, 640(%rax)
1999 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
2000 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 384(%rax)
2001 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, (%rax)
2002 ; AVX512DQ-FAST-NEXT: vzeroupper
; AVX512DQ-FAST-NEXT: retq
;
; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride6_vf16:
; AVX512BW-ONLY-SLOW: # %bb.0:
; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512BW-ONLY-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: movb $12, %r10b
; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: movb $16, %r10b
; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: movb $48, %r9b
; AVX512BW-ONLY-SLOW-NEXT: kmovd %r9d, %k2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # ymm22 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
; AVX512BW-ONLY-SLOW-NEXT: retq
;
; AVX512BW-ONLY-FAST-LABEL: store_i64_stride6_vf16:
; AVX512BW-ONLY-FAST: # %bb.0:
; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512BW-ONLY-FAST-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512BW-ONLY-FAST-NEXT: movb $12, %r10b
; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512BW-ONLY-FAST-NEXT: movb $16, %r10b
; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512BW-ONLY-FAST-NEXT: movb $48, %r9b
; AVX512BW-ONLY-FAST-NEXT: kmovd %r9d, %k2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # ymm22 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512BW-ONLY-FAST-NEXT: vzeroupper
; AVX512BW-ONLY-FAST-NEXT: retq
;
; AVX512DQBW-SLOW-LABEL: store_i64_stride6_vf16:
; AVX512DQBW-SLOW: # %bb.0:
; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512DQBW-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512DQBW-SLOW-NEXT: movb $12, %r10b
; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512DQBW-SLOW-NEXT: movb $16, %r10b
; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512DQBW-SLOW-NEXT: movb $48, %r9b
; AVX512DQBW-SLOW-NEXT: kmovd %r9d, %k2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # ymm22 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %ymm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512DQBW-SLOW-NEXT: vzeroupper
; AVX512DQBW-SLOW-NEXT: retq
;
; AVX512DQBW-FAST-LABEL: store_i64_stride6_vf16:
; AVX512DQBW-FAST: # %bb.0:
; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm8
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm1
; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
; AVX512DQBW-FAST-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm10
; AVX512DQBW-FAST-NEXT: movb $12, %r10b
; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm1 {%k1}
; AVX512DQBW-FAST-NEXT: movb $16, %r10b
; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k2
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm0
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm11, %zmm7, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm16
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm18, %zmm15
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm9
; AVX512DQBW-FAST-NEXT: movb $48, %r9b
; AVX512DQBW-FAST-NEXT: kmovd %r9d, %k2
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm9 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm19, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm9
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm21, %zmm22
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-FAST-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm17, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm22, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm15
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm18
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm12 {%k2}
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm19, %zmm12
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm12
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm5, %zmm21
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm22, %zmm17
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm17
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm19, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %xmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %xmm21
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm0, %ymm20
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm18, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm18
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm14, %zmm13, %zmm19
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm0, %ymm21
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm20, %zmm19
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm21
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm20, %zmm21
; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
; AVX512DQBW-FAST-NEXT: # ymm22 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm22, %zmm13
; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm14, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm0
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm24, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa (%rdi), %ymm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %ymm25
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm25, %zmm7, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm25, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm8, %zmm7
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm20
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm5
; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm21, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm23, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm24, %zmm2
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm2, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm25, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, 64(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, 448(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 512(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 576(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, 640(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 384(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, (%rax)
; AVX512DQBW-FAST-NEXT: vzeroupper
; AVX512DQBW-FAST-NEXT: retq
%in.vec0 = load <16 x i64>, ptr %in.vecptr0, align 64
%in.vec1 = load <16 x i64>, ptr %in.vecptr1, align 64
%in.vec2 = load <16 x i64>, ptr %in.vecptr2, align 64
%in.vec3 = load <16 x i64>, ptr %in.vecptr3, align 64
%in.vec4 = load <16 x i64>, ptr %in.vecptr4, align 64
%in.vec5 = load <16 x i64>, ptr %in.vecptr5, align 64
%1 = shufflevector <16 x i64> %in.vec0, <16 x i64> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%2 = shufflevector <16 x i64> %in.vec2, <16 x i64> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%3 = shufflevector <16 x i64> %in.vec4, <16 x i64> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%4 = shufflevector <32 x i64> %1, <32 x i64> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
%5 = shufflevector <32 x i64> %3, <32 x i64> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%6 = shufflevector <64 x i64> %4, <64 x i64> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
%interleaved.vec = shufflevector <96 x i64> %6, <96 x i64> poison, <96 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95>
store <96 x i64> %interleaved.vec, ptr %out.vec, align 64
ret void
}

define void @store_i64_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf32:
; SSE: # %bb.0:
; SSE-NEXT: subq $1176, %rsp # imm = 0x498
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps 32(%rdi), %xmm9
; SSE-NEXT: movaps (%rsi), %xmm2
; SSE-NEXT: movaps 16(%rsi), %xmm1
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm10
; SSE-NEXT: movaps 16(%rdx), %xmm11
; SSE-NEXT: movaps 32(%rdx), %xmm12
; SSE-NEXT: movaps (%rcx), %xmm4
; SSE-NEXT: movaps 16(%rcx), %xmm3
; SSE-NEXT: movaps (%r8), %xmm13
; SSE-NEXT: movaps 16(%r8), %xmm14
; SSE-NEXT: movaps (%r9), %xmm6
; SSE-NEXT: movaps 16(%r9), %xmm5
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm2[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm4[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm6[0]
; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm5[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdi), %xmm2
; SSE-NEXT: movaps 112(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%rdx), %xmm2
; SSE-NEXT: movaps 112(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 112(%r8), %xmm2
; SSE-NEXT: movaps 112(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdi), %xmm2
; SSE-NEXT: movaps 128(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%rdx), %xmm2
; SSE-NEXT: movaps 128(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 128(%r8), %xmm2
; SSE-NEXT: movaps 128(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdi), %xmm2
; SSE-NEXT: movaps 144(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%rdx), %xmm2
; SSE-NEXT: movaps 144(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 144(%r8), %xmm2
; SSE-NEXT: movaps 144(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdi), %xmm2
; SSE-NEXT: movaps 160(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%rdx), %xmm2
; SSE-NEXT: movaps 160(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 160(%r8), %xmm2
; SSE-NEXT: movaps 160(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdi), %xmm2
; SSE-NEXT: movaps 176(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%rdx), %xmm2
; SSE-NEXT: movaps 176(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 176(%r8), %xmm2
; SSE-NEXT: movaps 176(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdi), %xmm2
; SSE-NEXT: movaps 192(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%rdx), %xmm2
; SSE-NEXT: movaps 192(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 192(%r8), %xmm2
; SSE-NEXT: movaps 192(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 208(%rdi), %xmm15
; SSE-NEXT: movaps 208(%rsi), %xmm0
; SSE-NEXT: movaps %xmm15, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
; SSE-NEXT: movaps 208(%rdx), %xmm12
; SSE-NEXT: movaps 208(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps 208(%r8), %xmm11
; SSE-NEXT: movaps 208(%r9), %xmm0
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
; SSE-NEXT: movaps 224(%rdi), %xmm13
; SSE-NEXT: movaps 224(%rsi), %xmm0
; SSE-NEXT: movaps %xmm13, %xmm14
; SSE-NEXT: movlhps {{.*#+}} xmm14 = xmm14[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
; SSE-NEXT: movaps 224(%rdx), %xmm9
; SSE-NEXT: movaps 224(%rcx), %xmm0
; SSE-NEXT: movaps %xmm9, %xmm10
; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps 224(%r8), %xmm5
; SSE-NEXT: movaps 224(%r9), %xmm0
; SSE-NEXT: movaps %xmm5, %xmm8
; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
; SSE-NEXT: movaps 240(%rdi), %xmm6
; SSE-NEXT: movaps 240(%rsi), %xmm1
; SSE-NEXT: movaps %xmm6, %xmm7
; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: movaps 240(%rdx), %xmm1
; SSE-NEXT: movaps 240(%rcx), %xmm0
; SSE-NEXT: movaps %xmm1, %xmm4
; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
2900 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
2901 ; SSE-NEXT: movaps 240(%r8), %xmm0
2902 ; SSE-NEXT: movaps 240(%r9), %xmm3
2903 ; SSE-NEXT: movaps %xmm0, %xmm2
2904 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
2905 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
2906 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2907 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
2908 ; SSE-NEXT: movaps %xmm1, 1504(%rax)
2909 ; SSE-NEXT: movaps %xmm6, 1488(%rax)
2910 ; SSE-NEXT: movaps %xmm2, 1472(%rax)
2911 ; SSE-NEXT: movaps %xmm4, 1456(%rax)
2912 ; SSE-NEXT: movaps %xmm7, 1440(%rax)
2913 ; SSE-NEXT: movaps %xmm5, 1424(%rax)
2914 ; SSE-NEXT: movaps %xmm9, 1408(%rax)
2915 ; SSE-NEXT: movaps %xmm13, 1392(%rax)
2916 ; SSE-NEXT: movaps %xmm8, 1376(%rax)
2917 ; SSE-NEXT: movaps %xmm10, 1360(%rax)
2918 ; SSE-NEXT: movaps %xmm14, 1344(%rax)
2919 ; SSE-NEXT: movaps %xmm11, 1328(%rax)
2920 ; SSE-NEXT: movaps %xmm12, 1312(%rax)
2921 ; SSE-NEXT: movaps %xmm15, 1296(%rax)
2922 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2923 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
2924 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2925 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
2926 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2927 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
2928 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2929 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
2930 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2931 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
2932 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2933 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
2934 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2935 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
2936 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2937 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
2938 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2939 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
2940 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2941 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
2942 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2943 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
2944 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2945 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
2946 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2947 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
2948 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2949 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
2950 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2951 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
2952 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2953 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
2954 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2955 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
2956 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2957 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
2958 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2959 ; SSE-NEXT: movaps %xmm0, 992(%rax)
2960 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2961 ; SSE-NEXT: movaps %xmm0, 976(%rax)
2962 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2963 ; SSE-NEXT: movaps %xmm0, 960(%rax)
2964 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2965 ; SSE-NEXT: movaps %xmm0, 944(%rax)
2966 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2967 ; SSE-NEXT: movaps %xmm0, 928(%rax)
2968 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2969 ; SSE-NEXT: movaps %xmm0, 912(%rax)
2970 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2971 ; SSE-NEXT: movaps %xmm0, 896(%rax)
2972 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2973 ; SSE-NEXT: movaps %xmm0, 880(%rax)
2974 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2975 ; SSE-NEXT: movaps %xmm0, 864(%rax)
2976 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2977 ; SSE-NEXT: movaps %xmm0, 848(%rax)
2978 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2979 ; SSE-NEXT: movaps %xmm0, 832(%rax)
2980 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2981 ; SSE-NEXT: movaps %xmm0, 816(%rax)
2982 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2983 ; SSE-NEXT: movaps %xmm0, 800(%rax)
2984 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2985 ; SSE-NEXT: movaps %xmm0, 784(%rax)
2986 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2987 ; SSE-NEXT: movaps %xmm0, 768(%rax)
2988 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2989 ; SSE-NEXT: movaps %xmm0, 752(%rax)
2990 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2991 ; SSE-NEXT: movaps %xmm0, 736(%rax)
2992 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2993 ; SSE-NEXT: movaps %xmm0, 720(%rax)
2994 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2995 ; SSE-NEXT: movaps %xmm0, 704(%rax)
2996 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2997 ; SSE-NEXT: movaps %xmm0, 688(%rax)
2998 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2999 ; SSE-NEXT: movaps %xmm0, 672(%rax)
3000 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3001 ; SSE-NEXT: movaps %xmm0, 656(%rax)
3002 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3003 ; SSE-NEXT: movaps %xmm0, 640(%rax)
3004 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3005 ; SSE-NEXT: movaps %xmm0, 624(%rax)
3006 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3007 ; SSE-NEXT: movaps %xmm0, 608(%rax)
3008 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3009 ; SSE-NEXT: movaps %xmm0, 592(%rax)
3010 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3011 ; SSE-NEXT: movaps %xmm0, 576(%rax)
3012 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3013 ; SSE-NEXT: movaps %xmm0, 560(%rax)
3014 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3015 ; SSE-NEXT: movaps %xmm0, 544(%rax)
3016 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3017 ; SSE-NEXT: movaps %xmm0, 528(%rax)
3018 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3019 ; SSE-NEXT: movaps %xmm0, 512(%rax)
3020 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3021 ; SSE-NEXT: movaps %xmm0, 496(%rax)
3022 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3023 ; SSE-NEXT: movaps %xmm0, 480(%rax)
3024 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3025 ; SSE-NEXT: movaps %xmm0, 464(%rax)
3026 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3027 ; SSE-NEXT: movaps %xmm0, 448(%rax)
3028 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3029 ; SSE-NEXT: movaps %xmm0, 432(%rax)
3030 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3031 ; SSE-NEXT: movaps %xmm0, 416(%rax)
3032 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3033 ; SSE-NEXT: movaps %xmm0, 400(%rax)
3034 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3035 ; SSE-NEXT: movaps %xmm0, 384(%rax)
3036 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3037 ; SSE-NEXT: movaps %xmm0, 368(%rax)
3038 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3039 ; SSE-NEXT: movaps %xmm0, 352(%rax)
3040 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3041 ; SSE-NEXT: movaps %xmm0, 336(%rax)
3042 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3043 ; SSE-NEXT: movaps %xmm0, 320(%rax)
3044 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3045 ; SSE-NEXT: movaps %xmm0, 304(%rax)
3046 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3047 ; SSE-NEXT: movaps %xmm0, 288(%rax)
3048 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3049 ; SSE-NEXT: movaps %xmm0, 272(%rax)
3050 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3051 ; SSE-NEXT: movaps %xmm0, 256(%rax)
3052 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3053 ; SSE-NEXT: movaps %xmm0, 240(%rax)
3054 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3055 ; SSE-NEXT: movaps %xmm0, 224(%rax)
3056 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3057 ; SSE-NEXT: movaps %xmm0, 208(%rax)
3058 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3059 ; SSE-NEXT: movaps %xmm0, 192(%rax)
3060 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3061 ; SSE-NEXT: movaps %xmm0, 176(%rax)
3062 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3063 ; SSE-NEXT: movaps %xmm0, 160(%rax)
3064 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3065 ; SSE-NEXT: movaps %xmm0, 144(%rax)
3066 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3067 ; SSE-NEXT: movaps %xmm0, 128(%rax)
3068 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3069 ; SSE-NEXT: movaps %xmm0, 112(%rax)
3070 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3071 ; SSE-NEXT: movaps %xmm0, 96(%rax)
3072 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3073 ; SSE-NEXT: movaps %xmm0, 80(%rax)
3074 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3075 ; SSE-NEXT: movaps %xmm0, 64(%rax)
3076 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3077 ; SSE-NEXT: movaps %xmm0, 48(%rax)
3078 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3079 ; SSE-NEXT: movaps %xmm0, 32(%rax)
3080 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3081 ; SSE-NEXT: movaps %xmm0, 16(%rax)
3082 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3083 ; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $1176, %rsp # imm = 0x498
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride6_vf32:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: subq $1608, %rsp # imm = 0x648
; AVX1-ONLY-NEXT: vmovapd (%r8), %ymm0
; AVX1-ONLY-NEXT: vmovapd 32(%r8), %ymm1
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm10
; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm11
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm0[0,1],ymm3[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm3
; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm5
; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm2, %ymm3
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm10[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm1[0,1],ymm3[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r9), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm13
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm13[1],xmm11[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vmovapd 64(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r9), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vmovapd 96(%r8), %ymm5
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r9), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm14
; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm14[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
; AVX1-ONLY-NEXT: vmovapd 128(%r8), %ymm8
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm8[0,1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vbroadcastsd 136(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%r9), %ymm2, %ymm2
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm4
; AVX1-ONLY-NEXT: vmovapd 160(%r8), %ymm2
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm2[0,1],ymm4[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, (%rsp) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
; AVX1-ONLY-NEXT: vbroadcastsd 168(%r8), %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm4[0,1,2,3],ymm6[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%r9), %ymm4, %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm4
; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm6
; AVX1-ONLY-NEXT: vmovapd 192(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm4[0,1],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %xmm7
; AVX1-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX1-ONLY-NEXT: vbroadcastsd 200(%r8), %ymm7
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm6[0,1,2,3],ymm7[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%r9), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3],ymm7[4,5],ymm6[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm7
; AVX1-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
; AVX1-ONLY-NEXT: vmovapd 224(%r8), %ymm9
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %xmm7
; AVX1-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm7[1]
; AVX1-ONLY-NEXT: vbroadcastsd 232(%r8), %ymm7
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm6[0,1,2,3],ymm7[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%r9), %ymm6, %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3],ymm7[4,5],ymm6[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd (%rdi), %ymm6
; AVX1-ONLY-NEXT: vmovapd (%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovapd (%r9), %ymm0
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm0[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[2],ymm7[3]
; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 32(%rdi), %ymm6
; AVX1-ONLY-NEXT: vmovapd 32(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm6[2,3]
; AVX1-ONLY-NEXT: vmovapd 32(%r9), %ymm12
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm12[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 64(%rdi), %ymm1
; AVX1-ONLY-NEXT: vmovapd 64(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm3[2,3],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovapd 64(%r9), %ymm6
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm6[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 96(%rdi), %ymm1
; AVX1-ONLY-NEXT: vmovapd 96(%rsi), %ymm3
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm5[2,3],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovapd 96(%r9), %ymm5
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[2,3],ymm3[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 128(%rdi), %ymm1
; AVX1-ONLY-NEXT: vmovapd 128(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm8[2,3],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovapd 128(%r9), %ymm3
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm3[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm7[0],ymm1[2],ymm7[3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 160(%rdi), %ymm1
; AVX1-ONLY-NEXT: vmovapd 160(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm7[1],ymm1[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovapd 160(%r9), %ymm1
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm1[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm7[0],ymm2[2],ymm7[3]
; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 192(%rdi), %ymm2
; AVX1-ONLY-NEXT: vmovapd 192(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm7[1],ymm2[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX1-ONLY-NEXT: vmovapd 192(%r9), %ymm2
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm2[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[3]
; AVX1-ONLY-NEXT: vmovupd %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 224(%rdi), %ymm4
; AVX1-ONLY-NEXT: vmovapd 224(%rsi), %ymm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm4 = ymm4[1],ymm7[1],ymm4[3],ymm7[3]
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm8 = ymm9[2,3],ymm4[2,3]
; AVX1-ONLY-NEXT: vmovapd 224(%r9), %ymm4
; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm4[2,3],ymm7[2,3]
; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[2],ymm7[3]
; AVX1-ONLY-NEXT: vmovupd %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm7
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm7 = xmm7[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm8
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],ymm8[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 16(%rdx), %xmm7
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm8
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm7 = ymm7[0,1],ymm8[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm7[0,1,2],ymm0[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm7
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm7[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 48(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm7
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm12[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm7
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm7[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 80(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm7
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm6[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm6
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm6[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 112(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm6
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm6[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm5[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm5
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm5[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 144(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 152(%r8), %ymm5
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm5[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 176(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 184(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 208(%rdx), %xmm0
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 216(%r8), %ymm15
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm15[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 240(%rdi), %xmm0
; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
; AVX1-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm15
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm15[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd 240(%rdx), %xmm15
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm15[1],mem[1]
; AVX1-ONLY-NEXT: vbroadcastsd 248(%r8), %ymm12
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm12 = ymm15[0,1],ymm12[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm12[0,1,2],ymm4[3]
; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm14 = xmm0[0],xmm14[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm15 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm15 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm13 = xmm13[0],xmm11[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm12 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm12 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm0[0],xmm10[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm10 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm10 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm9 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm8 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm8 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm7 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd (%rsp), %xmm0, %xmm6 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm6 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm5 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm4 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm3 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm2 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm1 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovaps %xmm0, 16(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm1, (%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm2, 1168(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm3, 1152(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm4, 1360(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm5, 1344(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm6, 976(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm7, 960(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm8, 592(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm9, 576(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm10, 208(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm11, 192(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm12, 400(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm13, 384(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm15, 784(%rax)
; AVX1-ONLY-NEXT: vmovaps %xmm14, 768(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX1-ONLY-NEXT: addq $1608, %rsp # imm = 0x648
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride6_vf32:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: subq $1208, %rsp # imm = 0x4B8
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm5
; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm4
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm1
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm8
; AVX2-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm6
; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm3
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm7
; AVX2-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm8[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm5[0,1],ymm3[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm3
; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm5
; AVX2-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm8
; AVX2-ONLY-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm3
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm7[1],xmm6[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm8[1],xmm5[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %xmm14
; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm14[1]
; AVX2-ONLY-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %xmm12
; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm13
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %xmm10
; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %xmm11
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
; AVX2-ONLY-NEXT: vbroadcastsd 168(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm8
; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm9
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %xmm6
; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %xmm7
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-ONLY-NEXT: vbroadcastsd 200(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %xmm4
; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm5
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 224(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %xmm3
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 232(%r8), %ymm15
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm15[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3799 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
3800 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm1
3801 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
3802 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3803 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
3804 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm1
3805 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm2
3806 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
3807 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
3808 ; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm4
3809 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
3810 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3811 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
3812 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3813 ; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
3814 ; AVX2-ONLY-NEXT: vbroadcastsd 80(%r9), %ymm1
3815 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
3816 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3817 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
3818 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm1
3819 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
3820 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
3821 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
3822 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm2
3823 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
3824 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
3825 ; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm4
3826 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
3827 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3828 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
3829 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3830 ; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
3831 ; AVX2-ONLY-NEXT: vbroadcastsd 112(%r9), %ymm1
3832 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
3833 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3834 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
3835 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm1
3836 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm0[2,3],ymm1[2,3]
3837 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
3838 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm1
3839 ; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %ymm2
3840 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
3841 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
3842 ; AVX2-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm4
3843 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm3[0,1,2,3,4,5],ymm4[6,7]
3844 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
3845 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3846 ; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
3847 ; AVX2-ONLY-NEXT: vbroadcastsd 144(%r9), %ymm1
3848 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
3849 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
3850 ; AVX2-ONLY-NEXT: vbroadcastsd 152(%r8), %ymm1
3851 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm1[2,3]
3852 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
3853 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
3854 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %ymm2
3855 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
3856 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
3857 ; AVX2-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm5
3858 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm4[0,1,2,3,4,5],ymm5[6,7]
3859 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
3860 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3861 ; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
3862 ; AVX2-ONLY-NEXT: vbroadcastsd 176(%r9), %ymm1
3863 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
3864 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
3865 ; AVX2-ONLY-NEXT: vbroadcastsd 184(%r8), %ymm1
3866 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm0[2,3],ymm1[2,3]
3867 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm1
3868 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm0
3869 ; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %ymm2
3870 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
3871 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
3872 ; AVX2-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm12
3873 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm12[6,7]
3874 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
3875 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3876 ; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
3877 ; AVX2-ONLY-NEXT: vbroadcastsd 208(%r9), %ymm1
3878 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
3879 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],mem[1],ymm2[3],mem[3]
3880 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%r8), %ymm2
3881 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
3882 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm2
3883 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm12
3884 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %ymm0
3885 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm2[0],ymm12[0],ymm2[2],ymm12[2]
3886 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],ymm0[2,3]
3887 ; AVX2-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm15
3888 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
3889 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm12[1],ymm2[3],ymm12[3]
3890 ; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
3891 ; AVX2-ONLY-NEXT: # ymm2 = mem[2,3],ymm2[2,3]
3892 ; AVX2-ONLY-NEXT: vbroadcastsd 240(%r9), %ymm12
3893 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm12[2,3],ymm2[4,5,6,7]
3894 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
3895 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%r8), %ymm12
3896 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm12[2,3]
3897 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3898 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
3899 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
3900 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
3901 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
3902 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
3903 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
3904 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
3905 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
3906 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
3907 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
3908 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
3909 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 1472(%rax)
3910 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 1440(%rax)
3911 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3912 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rax)
3913 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 1312(%rax)
3914 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 1280(%rax)
3915 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 1248(%rax)
3916 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3917 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rax)
3918 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 1120(%rax)
3919 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 1088(%rax)
3920 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 1056(%rax)
3921 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3922 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
3923 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 928(%rax)
3924 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 896(%rax)
3925 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 864(%rax)
3926 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3927 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
3928 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 736(%rax)
3929 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3930 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
3931 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3932 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
3933 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3934 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
3935 ; AVX2-ONLY-NEXT: vmovaps %ymm13, 544(%rax)
3936 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3937 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
3938 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3939 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
3940 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3941 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
3942 ; AVX2-ONLY-NEXT: vmovaps %ymm15, 352(%rax)
3943 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3944 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
3945 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3946 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
3947 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3948 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
3949 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 160(%rax)
3950 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3951 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
3952 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3953 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
3954 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3955 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
3956 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3957 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
3958 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3959 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
3960 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3961 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
3962 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3963 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
3964 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3965 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
3966 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3967 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
3968 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3969 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
3970 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3971 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
3972 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3973 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
3974 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3975 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
3976 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3977 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
3978 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3979 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
3980 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3981 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
3982 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3983 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
3984 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3985 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
3986 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3987 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
3988 ; AVX2-ONLY-NEXT: addq $1208, %rsp # imm = 0x4B8
3989 ; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-ONLY-SLOW-LABEL: store_i64_stride6_vf32:
; AVX512F-ONLY-SLOW: # %bb.0:
; AVX512F-ONLY-SLOW-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512F-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512F-ONLY-SLOW-NEXT: # ymm20 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # ymm19 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: movb $12, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movb $48, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm0
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: movb $16, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512F-ONLY-SLOW-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512F-ONLY-SLOW-NEXT: vzeroupper
; AVX512F-ONLY-SLOW-NEXT: retq
;
; AVX512F-ONLY-FAST-LABEL: store_i64_stride6_vf32:
; AVX512F-ONLY-FAST: # %bb.0:
; AVX512F-ONLY-FAST-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512F-ONLY-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512F-ONLY-FAST-NEXT: # ymm20 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512F-ONLY-FAST-NEXT: # ymm19 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: movb $12, %al
; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512F-ONLY-FAST-NEXT: movb $48, %al
; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm0
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: movb $16, %al
; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512F-ONLY-FAST-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512F-ONLY-FAST-NEXT: vzeroupper
; AVX512F-ONLY-FAST-NEXT: retq
;
; AVX512DQ-SLOW-LABEL: store_i64_stride6_vf32:
; AVX512DQ-SLOW: # %bb.0:
; AVX512DQ-SLOW-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512DQ-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512DQ-SLOW-NEXT: # ymm20 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512DQ-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512DQ-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512DQ-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512DQ-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512DQ-SLOW-NEXT: # ymm19 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: movb $12, %al
; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512DQ-SLOW-NEXT: movb $48, %al
; AVX512DQ-SLOW-NEXT: kmovw %eax, %k2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
4687 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
4688 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
4689 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
4690 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
4691 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
4692 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
4693 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
4694 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %zmm22
4695 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
4696 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm20
4697 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
4698 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
4699 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4700 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm21
4701 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
4702 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r9), %zmm19
4703 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
4704 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r9), %zmm18
4705 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
4706 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
4707 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
4708 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm27
4709 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
4710 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
4711 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
4712 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
4713 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
4714 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
4715 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
4716 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
4717 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
4718 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm0
4719 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
4720 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4721 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
4722 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
4723 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
4724 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4725 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
4726 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdx), %xmm0
4727 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
4728 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4729 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
4730 ; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdx), %xmm0
4731 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
4732 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4733 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
4734 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
4735 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
4736 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
4737 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
4738 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
4739 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
4740 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
4741 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
4742 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
4743 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm0
4744 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
4745 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
4746 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
4747 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
4748 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
4749 ; AVX512DQ-SLOW-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
4750 ; AVX512DQ-SLOW-NEXT: movb $16, %al
4751 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
4752 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
4753 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
4754 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
4755 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
4756 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
4757 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm23
4758 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
4759 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
4760 ; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
4761 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
4762 ; AVX512DQ-SLOW-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
4763 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
4764 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
4765 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
4766 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdi), %ymm1
4767 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
4768 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
4769 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
4770 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
4771 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
4772 ; AVX512DQ-SLOW-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
4773 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
4774 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
4775 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
4776 ; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
4777 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
4778 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
4779 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
4780 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
4781 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
4782 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
4783 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
4784 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
4785 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
4786 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
4787 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
4788 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
4789 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
4790 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
4791 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
4792 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
4793 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
4794 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
4795 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
4796 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
4797 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
4798 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
4799 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4800 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 1472(%rax)
4801 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 1408(%rax)
4802 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, 1344(%rax)
4803 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 1280(%rax)
4804 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 1216(%rax)
4805 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
4806 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 1024(%rax)
4807 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 960(%rax)
4808 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 896(%rax)
4809 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 832(%rax)
4810 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, 704(%rax)
4811 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, 640(%rax)
4812 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, 576(%rax)
4813 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 512(%rax)
4814 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, 448(%rax)
4815 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
4816 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, 256(%rax)
4817 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 192(%rax)
4818 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
4819 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
4820 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
4821 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 1152(%rax)
4822 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 768(%rax)
4823 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 384(%rax)
4824 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, (%rax)
4825 ; AVX512DQ-SLOW-NEXT: addq $712, %rsp # imm = 0x2C8
4826 ; AVX512DQ-SLOW-NEXT: vzeroupper
4827 ; AVX512DQ-SLOW-NEXT: retq

; AVX512DQ-FAST-LABEL: store_i64_stride6_vf32:
; AVX512DQ-FAST: # %bb.0:
; AVX512DQ-FAST-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512DQ-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512DQ-FAST-NEXT: # ymm20 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512DQ-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512DQ-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512DQ-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512DQ-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512DQ-FAST-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512DQ-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512DQ-FAST-NEXT: # ymm19 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: movb $12, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512DQ-FAST-NEXT: movb $48, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k2
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512DQ-FAST-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FAST-NEXT: movb $16, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512DQ-FAST-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512DQ-FAST-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512DQ-FAST-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512DQ-FAST-NEXT: vzeroupper
; AVX512DQ-FAST-NEXT: retq

; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride6_vf32:
; AVX512BW-ONLY-SLOW: # %bb.0:
; AVX512BW-ONLY-SLOW-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512BW-ONLY-SLOW-NEXT: # ymm20 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # ymm19 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: movb $12, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: movb $48, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: movb $16, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512BW-ONLY-SLOW-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
; AVX512BW-ONLY-SLOW-NEXT: retq

; AVX512BW-ONLY-FAST-LABEL: store_i64_stride6_vf32:
; AVX512BW-ONLY-FAST: # %bb.0:
; AVX512BW-ONLY-FAST-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512BW-ONLY-FAST-NEXT: # ymm20 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # ymm19 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: movb $12, %al
; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512BW-ONLY-FAST-NEXT: movb $48, %al
; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
5582 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
5583 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
5584 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
5585 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
5586 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
5587 ; AVX512BW-ONLY-FAST-NEXT: movb $16, %al
5588 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
5589 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
5590 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
5591 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
5592 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
5593 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
5594 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %ymm23
5595 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
5596 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
5597 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
5598 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
5599 ; AVX512BW-ONLY-FAST-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
5600 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
5601 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
5602 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
5603 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm1
5604 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
5605 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5606 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
5607 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5608 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
5609 ; AVX512BW-ONLY-FAST-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
5610 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
5611 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
5612 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
5613 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
5614 ; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
5615 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
5616 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
5617 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
5618 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
5619 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
5620 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
5621 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
5622 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
5623 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
5624 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
5625 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
5626 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
5627 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
5628 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
5629 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
5630 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
5631 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
5632 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
5633 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
5634 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
5635 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
5636 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5637 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1472(%rax)
5638 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 1408(%rax)
5639 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1344(%rax)
5640 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 1280(%rax)
5641 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1216(%rax)
5642 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 1088(%rax)
5643 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 1024(%rax)
5644 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 960(%rax)
5645 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 896(%rax)
5646 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 832(%rax)
5647 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, 704(%rax)
5648 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 640(%rax)
5649 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 576(%rax)
5650 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 512(%rax)
5651 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 448(%rax)
5652 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
5653 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 256(%rax)
5654 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 192(%rax)
5655 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 128(%rax)
5656 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
5657 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
5658 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
5659 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
5660 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
5661 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, (%rax)
5662 ; AVX512BW-ONLY-FAST-NEXT: addq $712, %rsp # imm = 0x2C8
5663 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
5664 ; AVX512BW-ONLY-FAST-NEXT: retq
;
; AVX512DQBW-SLOW-LABEL: store_i64_stride6_vf32:
; AVX512DQBW-SLOW: # %bb.0:
; AVX512DQBW-SLOW-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512DQBW-SLOW-NEXT: # ymm20 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # ymm19 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: movb $12, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512DQBW-SLOW-NEXT: movb $48, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: movb $16, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512DQBW-SLOW-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512DQBW-SLOW-NEXT: vzeroupper
; AVX512DQBW-SLOW-NEXT: retq
;
; AVX512DQBW-FAST-LABEL: store_i64_stride6_vf32:
; AVX512DQBW-FAST: # %bb.0:
; AVX512DQBW-FAST-NEXT: subq $712, %rsp # imm = 0x2C8
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdi), %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rsi), %zmm21
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdx), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdx), %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm29
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rcx), %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rcx), %zmm25
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
; AVX512DQBW-FAST-NEXT: # ymm20 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm25, %zmm20, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm20, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm20, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm29, %zmm8, %zmm20
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm17
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm13, %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm31
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm13, %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm15
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm16
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm13, %zmm16
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm29, %zmm0, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm29
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm29
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm0, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm28, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm0, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm11, %zmm26
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm10
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm13
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm25, %zmm12, %zmm28
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm25
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm25
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm11
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm23
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm23
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm24
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm30
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm2, %zmm30
; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
; AVX512DQBW-FAST-NEXT: # ymm19 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm8
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm19, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm6, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm7
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm3
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm6
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm18, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm19, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: movb $12, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm26 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm9 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm25 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm11 {%k1}
; AVX512DQBW-FAST-NEXT: movb $48, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k2
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, %zmm17 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm31 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm14 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm16 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm10 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm21
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r9), %zmm19
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm13 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm27
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm27
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm16
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm27
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm16
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm30, %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm0, %zmm24
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm0, %zmm6
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm0, %zmm5
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm0, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: movb $16, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm11 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %ymm23
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm23, %zmm29, %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm25 {%k1}
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm23
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm17, %zmm29
; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdi), %ymm1
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm8, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm17, %zmm30
; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm28, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm7, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm26 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm17, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm17, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm2
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm28, %zmm23
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm25
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm22, %zmm29
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm17, %zmm9
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm22, %zmm30
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm17, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm22, %zmm7
; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 1472(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 1408(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, 1344(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, 1280(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, 1216(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, 1088(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, 1024(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, 960(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 896(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, 832(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, 704(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, 640(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, 576(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, 512(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, 448(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, 256(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, 192(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, (%rax)
; AVX512DQBW-FAST-NEXT: addq $712, %rsp # imm = 0x2C8
; AVX512DQBW-FAST-NEXT: vzeroupper
; AVX512DQBW-FAST-NEXT: retq
  %in.vec0 = load <32 x i64>, ptr %in.vecptr0, align 64
  %in.vec1 = load <32 x i64>, ptr %in.vecptr1, align 64
  %in.vec2 = load <32 x i64>, ptr %in.vecptr2, align 64
  %in.vec3 = load <32 x i64>, ptr %in.vecptr3, align 64
  %in.vec4 = load <32 x i64>, ptr %in.vecptr4, align 64
  %in.vec5 = load <32 x i64>, ptr %in.vecptr5, align 64
  %1 = shufflevector <32 x i64> %in.vec0, <32 x i64> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %2 = shufflevector <32 x i64> %in.vec2, <32 x i64> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %3 = shufflevector <32 x i64> %in.vec4, <32 x i64> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
  %4 = shufflevector <64 x i64> %1, <64 x i64> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
  %5 = shufflevector <64 x i64> %3, <64 x i64> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <128 x i64> %4, <128 x i64> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
  %interleaved.vec = shufflevector <192 x i64> %6, <192 x i64> poison, <192 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191>
  store <192 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i64_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i64_stride6_vf64:
; SSE: # %bb.0:
; SSE-NEXT: subq $2712, %rsp # imm = 0xA98
; SSE-NEXT: movaps (%rdi), %xmm7
; SSE-NEXT: movaps 16(%rdi), %xmm8
; SSE-NEXT: movaps 32(%rdi), %xmm9
; SSE-NEXT: movaps (%rsi), %xmm2
; SSE-NEXT: movaps 16(%rsi), %xmm1
; SSE-NEXT: movaps 32(%rsi), %xmm0
; SSE-NEXT: movaps (%rdx), %xmm10
; SSE-NEXT: movaps 16(%rdx), %xmm11
; SSE-NEXT: movaps 32(%rdx), %xmm12
; SSE-NEXT: movaps (%rcx), %xmm4
; SSE-NEXT: movaps 16(%rcx), %xmm3
; SSE-NEXT: movaps 16(%r8), %xmm14
; SSE-NEXT: movaps (%r8), %xmm13
; SSE-NEXT: movaps 16(%r9), %xmm5
; SSE-NEXT: movaps (%r9), %xmm6
; SSE-NEXT: movaps %xmm7, %xmm15
; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm2[0]
; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm10, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm4[1]
; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm13, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm6[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm6[1]
; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm8, %xmm2
; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm1[1]
; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm11, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm14, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm5[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm5[1]
; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps %xmm9, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%rcx), %xmm0
; SSE-NEXT: movaps %xmm12, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 32(%r8), %xmm2
; SSE-NEXT: movaps 32(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdi), %xmm2
; SSE-NEXT: movaps 48(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%rdx), %xmm2
; SSE-NEXT: movaps 48(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 48(%r8), %xmm2
; SSE-NEXT: movaps 48(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdi), %xmm2
; SSE-NEXT: movaps 64(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%rdx), %xmm2
; SSE-NEXT: movaps 64(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 64(%r8), %xmm2
; SSE-NEXT: movaps 64(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdi), %xmm2
; SSE-NEXT: movaps 80(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%rdx), %xmm2
; SSE-NEXT: movaps 80(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 80(%r8), %xmm2
; SSE-NEXT: movaps 80(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdi), %xmm2
; SSE-NEXT: movaps 96(%rsi), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%rdx), %xmm2
; SSE-NEXT: movaps 96(%rcx), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movaps 96(%r8), %xmm2
; SSE-NEXT: movaps 96(%r9), %xmm0
; SSE-NEXT: movaps %xmm2, %xmm1
; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6391 ; SSE-NEXT: movaps 112(%rdi), %xmm2
6392 ; SSE-NEXT: movaps 112(%rsi), %xmm0
6393 ; SSE-NEXT: movaps %xmm2, %xmm1
6394 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6395 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6396 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6397 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6398 ; SSE-NEXT: movaps 112(%rdx), %xmm2
6399 ; SSE-NEXT: movaps 112(%rcx), %xmm0
6400 ; SSE-NEXT: movaps %xmm2, %xmm1
6401 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6402 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6403 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6404 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6405 ; SSE-NEXT: movaps 112(%r8), %xmm2
6406 ; SSE-NEXT: movaps 112(%r9), %xmm0
6407 ; SSE-NEXT: movaps %xmm2, %xmm1
6408 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6409 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6410 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6411 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6412 ; SSE-NEXT: movaps 128(%rdi), %xmm2
6413 ; SSE-NEXT: movaps 128(%rsi), %xmm0
6414 ; SSE-NEXT: movaps %xmm2, %xmm1
6415 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6416 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6417 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6418 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6419 ; SSE-NEXT: movaps 128(%rdx), %xmm2
6420 ; SSE-NEXT: movaps 128(%rcx), %xmm0
6421 ; SSE-NEXT: movaps %xmm2, %xmm1
6422 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6423 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6424 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6425 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6426 ; SSE-NEXT: movaps 128(%r8), %xmm2
6427 ; SSE-NEXT: movaps 128(%r9), %xmm0
6428 ; SSE-NEXT: movaps %xmm2, %xmm1
6429 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6430 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6431 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6432 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6433 ; SSE-NEXT: movaps 144(%rdi), %xmm2
6434 ; SSE-NEXT: movaps 144(%rsi), %xmm0
6435 ; SSE-NEXT: movaps %xmm2, %xmm1
6436 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6437 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6438 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6439 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6440 ; SSE-NEXT: movaps 144(%rdx), %xmm2
6441 ; SSE-NEXT: movaps 144(%rcx), %xmm0
6442 ; SSE-NEXT: movaps %xmm2, %xmm1
6443 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6444 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6445 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6446 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6447 ; SSE-NEXT: movaps 144(%r8), %xmm2
6448 ; SSE-NEXT: movaps 144(%r9), %xmm0
6449 ; SSE-NEXT: movaps %xmm2, %xmm1
6450 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6451 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6452 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6453 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6454 ; SSE-NEXT: movaps 160(%rdi), %xmm2
6455 ; SSE-NEXT: movaps 160(%rsi), %xmm0
6456 ; SSE-NEXT: movaps %xmm2, %xmm1
6457 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6458 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6459 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6460 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6461 ; SSE-NEXT: movaps 160(%rdx), %xmm2
6462 ; SSE-NEXT: movaps 160(%rcx), %xmm0
6463 ; SSE-NEXT: movaps %xmm2, %xmm1
6464 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6465 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6466 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6467 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6468 ; SSE-NEXT: movaps 160(%r8), %xmm2
6469 ; SSE-NEXT: movaps 160(%r9), %xmm0
6470 ; SSE-NEXT: movaps %xmm2, %xmm1
6471 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6472 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6473 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6474 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6475 ; SSE-NEXT: movaps 176(%rdi), %xmm2
6476 ; SSE-NEXT: movaps 176(%rsi), %xmm0
6477 ; SSE-NEXT: movaps %xmm2, %xmm1
6478 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6479 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6480 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6481 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6482 ; SSE-NEXT: movaps 176(%rdx), %xmm2
6483 ; SSE-NEXT: movaps 176(%rcx), %xmm0
6484 ; SSE-NEXT: movaps %xmm2, %xmm1
6485 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6486 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6487 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6488 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6489 ; SSE-NEXT: movaps 176(%r8), %xmm2
6490 ; SSE-NEXT: movaps 176(%r9), %xmm0
6491 ; SSE-NEXT: movaps %xmm2, %xmm1
6492 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6493 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6494 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6495 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6496 ; SSE-NEXT: movaps 192(%rdi), %xmm2
6497 ; SSE-NEXT: movaps 192(%rsi), %xmm0
6498 ; SSE-NEXT: movaps %xmm2, %xmm1
6499 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6500 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6501 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6502 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6503 ; SSE-NEXT: movaps 192(%rdx), %xmm2
6504 ; SSE-NEXT: movaps 192(%rcx), %xmm0
6505 ; SSE-NEXT: movaps %xmm2, %xmm1
6506 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6507 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6508 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6509 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6510 ; SSE-NEXT: movaps 192(%r8), %xmm2
6511 ; SSE-NEXT: movaps 192(%r9), %xmm0
6512 ; SSE-NEXT: movaps %xmm2, %xmm1
6513 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6514 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6515 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6516 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6517 ; SSE-NEXT: movaps 208(%rdi), %xmm2
6518 ; SSE-NEXT: movaps 208(%rsi), %xmm0
6519 ; SSE-NEXT: movaps %xmm2, %xmm1
6520 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6521 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6522 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6523 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6524 ; SSE-NEXT: movaps 208(%rdx), %xmm2
6525 ; SSE-NEXT: movaps 208(%rcx), %xmm0
6526 ; SSE-NEXT: movaps %xmm2, %xmm1
6527 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6528 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6529 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6530 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6531 ; SSE-NEXT: movaps 208(%r8), %xmm2
6532 ; SSE-NEXT: movaps 208(%r9), %xmm0
6533 ; SSE-NEXT: movaps %xmm2, %xmm1
6534 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6535 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6536 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6537 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6538 ; SSE-NEXT: movaps 224(%rdi), %xmm2
6539 ; SSE-NEXT: movaps 224(%rsi), %xmm0
6540 ; SSE-NEXT: movaps %xmm2, %xmm1
6541 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6542 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6543 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6544 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6545 ; SSE-NEXT: movaps 224(%rdx), %xmm2
6546 ; SSE-NEXT: movaps 224(%rcx), %xmm0
6547 ; SSE-NEXT: movaps %xmm2, %xmm1
6548 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6549 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6550 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6551 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6552 ; SSE-NEXT: movaps 224(%r8), %xmm2
6553 ; SSE-NEXT: movaps 224(%r9), %xmm0
6554 ; SSE-NEXT: movaps %xmm2, %xmm1
6555 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6556 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6557 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6558 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6559 ; SSE-NEXT: movaps 240(%rdi), %xmm2
6560 ; SSE-NEXT: movaps 240(%rsi), %xmm0
6561 ; SSE-NEXT: movaps %xmm2, %xmm1
6562 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6563 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6564 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6565 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6566 ; SSE-NEXT: movaps 240(%rdx), %xmm2
6567 ; SSE-NEXT: movaps 240(%rcx), %xmm0
6568 ; SSE-NEXT: movaps %xmm2, %xmm1
6569 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6570 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6571 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6572 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6573 ; SSE-NEXT: movaps 240(%r8), %xmm2
6574 ; SSE-NEXT: movaps 240(%r9), %xmm0
6575 ; SSE-NEXT: movaps %xmm2, %xmm1
6576 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6577 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6578 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6579 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6580 ; SSE-NEXT: movaps 256(%rdi), %xmm2
6581 ; SSE-NEXT: movaps 256(%rsi), %xmm0
6582 ; SSE-NEXT: movaps %xmm2, %xmm1
6583 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6584 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6585 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6586 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6587 ; SSE-NEXT: movaps 256(%rdx), %xmm2
6588 ; SSE-NEXT: movaps 256(%rcx), %xmm0
6589 ; SSE-NEXT: movaps %xmm2, %xmm1
6590 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6591 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6592 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6593 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6594 ; SSE-NEXT: movaps 256(%r8), %xmm2
6595 ; SSE-NEXT: movaps 256(%r9), %xmm0
6596 ; SSE-NEXT: movaps %xmm2, %xmm1
6597 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6598 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6599 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6600 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6601 ; SSE-NEXT: movaps 272(%rdi), %xmm2
6602 ; SSE-NEXT: movaps 272(%rsi), %xmm0
6603 ; SSE-NEXT: movaps %xmm2, %xmm1
6604 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6605 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6606 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6607 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6608 ; SSE-NEXT: movaps 272(%rdx), %xmm2
6609 ; SSE-NEXT: movaps 272(%rcx), %xmm0
6610 ; SSE-NEXT: movaps %xmm2, %xmm1
6611 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6612 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6613 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6614 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6615 ; SSE-NEXT: movaps 272(%r8), %xmm2
6616 ; SSE-NEXT: movaps 272(%r9), %xmm0
6617 ; SSE-NEXT: movaps %xmm2, %xmm1
6618 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6619 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6620 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6621 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6622 ; SSE-NEXT: movaps 288(%rdi), %xmm2
6623 ; SSE-NEXT: movaps 288(%rsi), %xmm0
6624 ; SSE-NEXT: movaps %xmm2, %xmm1
6625 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6626 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6627 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6628 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6629 ; SSE-NEXT: movaps 288(%rdx), %xmm2
6630 ; SSE-NEXT: movaps 288(%rcx), %xmm0
6631 ; SSE-NEXT: movaps %xmm2, %xmm1
6632 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6633 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6634 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6635 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6636 ; SSE-NEXT: movaps 288(%r8), %xmm2
6637 ; SSE-NEXT: movaps 288(%r9), %xmm0
6638 ; SSE-NEXT: movaps %xmm2, %xmm1
6639 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6640 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6641 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6642 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6643 ; SSE-NEXT: movaps 304(%rdi), %xmm2
6644 ; SSE-NEXT: movaps 304(%rsi), %xmm0
6645 ; SSE-NEXT: movaps %xmm2, %xmm1
6646 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6647 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6648 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6649 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6650 ; SSE-NEXT: movaps 304(%rdx), %xmm2
6651 ; SSE-NEXT: movaps 304(%rcx), %xmm0
6652 ; SSE-NEXT: movaps %xmm2, %xmm1
6653 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6654 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6655 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6656 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6657 ; SSE-NEXT: movaps 304(%r8), %xmm2
6658 ; SSE-NEXT: movaps 304(%r9), %xmm0
6659 ; SSE-NEXT: movaps %xmm2, %xmm1
6660 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6661 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6662 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6663 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6664 ; SSE-NEXT: movaps 320(%rdi), %xmm2
6665 ; SSE-NEXT: movaps 320(%rsi), %xmm0
6666 ; SSE-NEXT: movaps %xmm2, %xmm1
6667 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6668 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6669 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6670 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6671 ; SSE-NEXT: movaps 320(%rdx), %xmm2
6672 ; SSE-NEXT: movaps 320(%rcx), %xmm0
6673 ; SSE-NEXT: movaps %xmm2, %xmm1
6674 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6675 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6676 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6677 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6678 ; SSE-NEXT: movaps 320(%r8), %xmm2
6679 ; SSE-NEXT: movaps 320(%r9), %xmm0
6680 ; SSE-NEXT: movaps %xmm2, %xmm1
6681 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6682 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6683 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6684 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6685 ; SSE-NEXT: movaps 336(%rdi), %xmm2
6686 ; SSE-NEXT: movaps 336(%rsi), %xmm0
6687 ; SSE-NEXT: movaps %xmm2, %xmm1
6688 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6689 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6690 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6691 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6692 ; SSE-NEXT: movaps 336(%rdx), %xmm2
6693 ; SSE-NEXT: movaps 336(%rcx), %xmm0
6694 ; SSE-NEXT: movaps %xmm2, %xmm1
6695 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6696 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6697 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6698 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6699 ; SSE-NEXT: movaps 336(%r8), %xmm2
6700 ; SSE-NEXT: movaps 336(%r9), %xmm0
6701 ; SSE-NEXT: movaps %xmm2, %xmm1
6702 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6703 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6704 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6705 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6706 ; SSE-NEXT: movaps 352(%rdi), %xmm2
6707 ; SSE-NEXT: movaps 352(%rsi), %xmm0
6708 ; SSE-NEXT: movaps %xmm2, %xmm1
6709 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6710 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6711 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6712 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6713 ; SSE-NEXT: movaps 352(%rdx), %xmm2
6714 ; SSE-NEXT: movaps 352(%rcx), %xmm0
6715 ; SSE-NEXT: movaps %xmm2, %xmm1
6716 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6717 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6718 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6719 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6720 ; SSE-NEXT: movaps 352(%r8), %xmm2
6721 ; SSE-NEXT: movaps 352(%r9), %xmm0
6722 ; SSE-NEXT: movaps %xmm2, %xmm1
6723 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6724 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6725 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6726 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6727 ; SSE-NEXT: movaps 368(%rdi), %xmm2
6728 ; SSE-NEXT: movaps 368(%rsi), %xmm0
6729 ; SSE-NEXT: movaps %xmm2, %xmm1
6730 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6731 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6732 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6733 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6734 ; SSE-NEXT: movaps 368(%rdx), %xmm2
6735 ; SSE-NEXT: movaps 368(%rcx), %xmm0
6736 ; SSE-NEXT: movaps %xmm2, %xmm1
6737 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6738 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6739 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6740 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6741 ; SSE-NEXT: movaps 368(%r8), %xmm2
6742 ; SSE-NEXT: movaps 368(%r9), %xmm0
6743 ; SSE-NEXT: movaps %xmm2, %xmm1
6744 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6745 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6746 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6747 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6748 ; SSE-NEXT: movaps 384(%rdi), %xmm2
6749 ; SSE-NEXT: movaps 384(%rsi), %xmm0
6750 ; SSE-NEXT: movaps %xmm2, %xmm1
6751 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6752 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6753 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6754 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6755 ; SSE-NEXT: movaps 384(%rdx), %xmm2
6756 ; SSE-NEXT: movaps 384(%rcx), %xmm0
6757 ; SSE-NEXT: movaps %xmm2, %xmm1
6758 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6759 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6760 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6761 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6762 ; SSE-NEXT: movaps 384(%r8), %xmm2
6763 ; SSE-NEXT: movaps 384(%r9), %xmm0
6764 ; SSE-NEXT: movaps %xmm2, %xmm1
6765 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6766 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6767 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6768 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6769 ; SSE-NEXT: movaps 400(%rdi), %xmm2
6770 ; SSE-NEXT: movaps 400(%rsi), %xmm0
6771 ; SSE-NEXT: movaps %xmm2, %xmm1
6772 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6773 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6774 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6775 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6776 ; SSE-NEXT: movaps 400(%rdx), %xmm2
6777 ; SSE-NEXT: movaps 400(%rcx), %xmm0
6778 ; SSE-NEXT: movaps %xmm2, %xmm1
6779 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6780 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6781 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6782 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6783 ; SSE-NEXT: movaps 400(%r8), %xmm2
6784 ; SSE-NEXT: movaps 400(%r9), %xmm0
6785 ; SSE-NEXT: movaps %xmm2, %xmm1
6786 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6787 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6788 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6789 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6790 ; SSE-NEXT: movaps 416(%rdi), %xmm2
6791 ; SSE-NEXT: movaps 416(%rsi), %xmm0
6792 ; SSE-NEXT: movaps %xmm2, %xmm1
6793 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6794 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6795 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6796 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6797 ; SSE-NEXT: movaps 416(%rdx), %xmm2
6798 ; SSE-NEXT: movaps 416(%rcx), %xmm0
6799 ; SSE-NEXT: movaps %xmm2, %xmm1
6800 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6801 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6802 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6803 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6804 ; SSE-NEXT: movaps 416(%r8), %xmm2
6805 ; SSE-NEXT: movaps 416(%r9), %xmm0
6806 ; SSE-NEXT: movaps %xmm2, %xmm1
6807 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6808 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6809 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6810 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6811 ; SSE-NEXT: movaps 432(%rdi), %xmm2
6812 ; SSE-NEXT: movaps 432(%rsi), %xmm0
6813 ; SSE-NEXT: movaps %xmm2, %xmm1
6814 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6815 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6816 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6817 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6818 ; SSE-NEXT: movaps 432(%rdx), %xmm2
6819 ; SSE-NEXT: movaps 432(%rcx), %xmm0
6820 ; SSE-NEXT: movaps %xmm2, %xmm1
6821 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6822 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6823 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6824 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6825 ; SSE-NEXT: movaps 432(%r8), %xmm2
6826 ; SSE-NEXT: movaps 432(%r9), %xmm0
6827 ; SSE-NEXT: movaps %xmm2, %xmm1
6828 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6829 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6830 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6831 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6832 ; SSE-NEXT: movaps 448(%rdi), %xmm2
6833 ; SSE-NEXT: movaps 448(%rsi), %xmm0
6834 ; SSE-NEXT: movaps %xmm2, %xmm1
6835 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6836 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
6837 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6838 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6839 ; SSE-NEXT: movaps 448(%rdx), %xmm2
6840 ; SSE-NEXT: movaps 448(%rcx), %xmm0
6841 ; SSE-NEXT: movaps %xmm2, %xmm1
6842 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6843 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6844 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6845 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6846 ; SSE-NEXT: movaps 448(%r8), %xmm2
6847 ; SSE-NEXT: movaps 448(%r9), %xmm0
6848 ; SSE-NEXT: movaps %xmm2, %xmm1
6849 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6850 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6851 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6852 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6853 ; SSE-NEXT: movaps 464(%rdi), %xmm15
6854 ; SSE-NEXT: movaps 464(%rsi), %xmm0
6855 ; SSE-NEXT: movaps %xmm15, %xmm1
6856 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6857 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6858 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
6859 ; SSE-NEXT: movaps 464(%rdx), %xmm14
6860 ; SSE-NEXT: movaps 464(%rcx), %xmm0
6861 ; SSE-NEXT: movaps %xmm14, %xmm1
6862 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6863 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6864 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
6865 ; SSE-NEXT: movaps 464(%r8), %xmm11
6866 ; SSE-NEXT: movaps 464(%r9), %xmm0
6867 ; SSE-NEXT: movaps %xmm11, %xmm1
6868 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
6869 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6870 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
6871 ; SSE-NEXT: movaps 480(%rdi), %xmm12
6872 ; SSE-NEXT: movaps 480(%rsi), %xmm0
6873 ; SSE-NEXT: movaps %xmm12, %xmm13
6874 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm0[0]
6875 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
6876 ; SSE-NEXT: movaps 480(%rdx), %xmm8
6877 ; SSE-NEXT: movaps 480(%rcx), %xmm0
6878 ; SSE-NEXT: movaps %xmm8, %xmm10
6879 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm0[0]
6880 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm0[1]
6881 ; SSE-NEXT: movaps 480(%r8), %xmm5
6882 ; SSE-NEXT: movaps 480(%r9), %xmm0
6883 ; SSE-NEXT: movaps %xmm5, %xmm9
6884 ; SSE-NEXT: movlhps {{.*#+}} xmm9 = xmm9[0],xmm0[0]
6885 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
6886 ; SSE-NEXT: movaps 496(%rdi), %xmm6
6887 ; SSE-NEXT: movaps 496(%rsi), %xmm1
6888 ; SSE-NEXT: movaps %xmm6, %xmm7
6889 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm1[0]
6890 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm1[1]
6891 ; SSE-NEXT: movaps 496(%rdx), %xmm1
6892 ; SSE-NEXT: movaps 496(%rcx), %xmm0
6893 ; SSE-NEXT: movaps %xmm1, %xmm4
6894 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm0[0]
6895 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm0[1]
6896 ; SSE-NEXT: movaps 496(%r8), %xmm0
6897 ; SSE-NEXT: movaps 496(%r9), %xmm3
6898 ; SSE-NEXT: movaps %xmm0, %xmm2
6899 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
6900 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
6901 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
6902 ; SSE-NEXT: movaps %xmm0, 3056(%rax)
6903 ; SSE-NEXT: movaps %xmm1, 3040(%rax)
6904 ; SSE-NEXT: movaps %xmm6, 3024(%rax)
6905 ; SSE-NEXT: movaps %xmm2, 3008(%rax)
6906 ; SSE-NEXT: movaps %xmm4, 2992(%rax)
6907 ; SSE-NEXT: movaps %xmm7, 2976(%rax)
6908 ; SSE-NEXT: movaps %xmm5, 2960(%rax)
6909 ; SSE-NEXT: movaps %xmm8, 2944(%rax)
6910 ; SSE-NEXT: movaps %xmm12, 2928(%rax)
6911 ; SSE-NEXT: movaps %xmm9, 2912(%rax)
6912 ; SSE-NEXT: movaps %xmm10, 2896(%rax)
6913 ; SSE-NEXT: movaps %xmm13, 2880(%rax)
6914 ; SSE-NEXT: movaps %xmm11, 2864(%rax)
6915 ; SSE-NEXT: movaps %xmm14, 2848(%rax)
6916 ; SSE-NEXT: movaps %xmm15, 2832(%rax)
6917 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6918 ; SSE-NEXT: movaps %xmm0, 2816(%rax)
6919 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6920 ; SSE-NEXT: movaps %xmm0, 2800(%rax)
6921 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6922 ; SSE-NEXT: movaps %xmm0, 2784(%rax)
6923 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6924 ; SSE-NEXT: movaps %xmm0, 2768(%rax)
6925 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6926 ; SSE-NEXT: movaps %xmm0, 2752(%rax)
6927 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6928 ; SSE-NEXT: movaps %xmm0, 2736(%rax)
6929 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6930 ; SSE-NEXT: movaps %xmm0, 2720(%rax)
6931 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6932 ; SSE-NEXT: movaps %xmm0, 2704(%rax)
6933 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
6934 ; SSE-NEXT: movaps %xmm0, 2688(%rax)
6935 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6936 ; SSE-NEXT: movaps %xmm0, 2672(%rax)
6937 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6938 ; SSE-NEXT: movaps %xmm0, 2656(%rax)
6939 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6940 ; SSE-NEXT: movaps %xmm0, 2640(%rax)
6941 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6942 ; SSE-NEXT: movaps %xmm0, 2624(%rax)
6943 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6944 ; SSE-NEXT: movaps %xmm0, 2608(%rax)
6945 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6946 ; SSE-NEXT: movaps %xmm0, 2592(%rax)
6947 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6948 ; SSE-NEXT: movaps %xmm0, 2576(%rax)
6949 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6950 ; SSE-NEXT: movaps %xmm0, 2560(%rax)
6951 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6952 ; SSE-NEXT: movaps %xmm0, 2544(%rax)
6953 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6954 ; SSE-NEXT: movaps %xmm0, 2528(%rax)
6955 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6956 ; SSE-NEXT: movaps %xmm0, 2512(%rax)
6957 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6958 ; SSE-NEXT: movaps %xmm0, 2496(%rax)
6959 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6960 ; SSE-NEXT: movaps %xmm0, 2480(%rax)
6961 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6962 ; SSE-NEXT: movaps %xmm0, 2464(%rax)
6963 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6964 ; SSE-NEXT: movaps %xmm0, 2448(%rax)
6965 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6966 ; SSE-NEXT: movaps %xmm0, 2432(%rax)
6967 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6968 ; SSE-NEXT: movaps %xmm0, 2416(%rax)
6969 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6970 ; SSE-NEXT: movaps %xmm0, 2400(%rax)
6971 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6972 ; SSE-NEXT: movaps %xmm0, 2384(%rax)
6973 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6974 ; SSE-NEXT: movaps %xmm0, 2368(%rax)
6975 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6976 ; SSE-NEXT: movaps %xmm0, 2352(%rax)
6977 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6978 ; SSE-NEXT: movaps %xmm0, 2336(%rax)
6979 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6980 ; SSE-NEXT: movaps %xmm0, 2320(%rax)
6981 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6982 ; SSE-NEXT: movaps %xmm0, 2304(%rax)
6983 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6984 ; SSE-NEXT: movaps %xmm0, 2288(%rax)
6985 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6986 ; SSE-NEXT: movaps %xmm0, 2272(%rax)
6987 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6988 ; SSE-NEXT: movaps %xmm0, 2256(%rax)
6989 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6990 ; SSE-NEXT: movaps %xmm0, 2240(%rax)
6991 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6992 ; SSE-NEXT: movaps %xmm0, 2224(%rax)
6993 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6994 ; SSE-NEXT: movaps %xmm0, 2208(%rax)
6995 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6996 ; SSE-NEXT: movaps %xmm0, 2192(%rax)
6997 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6998 ; SSE-NEXT: movaps %xmm0, 2176(%rax)
6999 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7000 ; SSE-NEXT: movaps %xmm0, 2160(%rax)
7001 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7002 ; SSE-NEXT: movaps %xmm0, 2144(%rax)
7003 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7004 ; SSE-NEXT: movaps %xmm0, 2128(%rax)
7005 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7006 ; SSE-NEXT: movaps %xmm0, 2112(%rax)
7007 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7008 ; SSE-NEXT: movaps %xmm0, 2096(%rax)
7009 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7010 ; SSE-NEXT: movaps %xmm0, 2080(%rax)
7011 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7012 ; SSE-NEXT: movaps %xmm0, 2064(%rax)
7013 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7014 ; SSE-NEXT: movaps %xmm0, 2048(%rax)
7015 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7016 ; SSE-NEXT: movaps %xmm0, 2032(%rax)
7017 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7018 ; SSE-NEXT: movaps %xmm0, 2016(%rax)
7019 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7020 ; SSE-NEXT: movaps %xmm0, 2000(%rax)
7021 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7022 ; SSE-NEXT: movaps %xmm0, 1984(%rax)
7023 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7024 ; SSE-NEXT: movaps %xmm0, 1968(%rax)
7025 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7026 ; SSE-NEXT: movaps %xmm0, 1952(%rax)
7027 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7028 ; SSE-NEXT: movaps %xmm0, 1936(%rax)
7029 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7030 ; SSE-NEXT: movaps %xmm0, 1920(%rax)
7031 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7032 ; SSE-NEXT: movaps %xmm0, 1904(%rax)
7033 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7034 ; SSE-NEXT: movaps %xmm0, 1888(%rax)
7035 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7036 ; SSE-NEXT: movaps %xmm0, 1872(%rax)
7037 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7038 ; SSE-NEXT: movaps %xmm0, 1856(%rax)
7039 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7040 ; SSE-NEXT: movaps %xmm0, 1840(%rax)
7041 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7042 ; SSE-NEXT: movaps %xmm0, 1824(%rax)
7043 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7044 ; SSE-NEXT: movaps %xmm0, 1808(%rax)
7045 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7046 ; SSE-NEXT: movaps %xmm0, 1792(%rax)
7047 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7048 ; SSE-NEXT: movaps %xmm0, 1776(%rax)
7049 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7050 ; SSE-NEXT: movaps %xmm0, 1760(%rax)
7051 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7052 ; SSE-NEXT: movaps %xmm0, 1744(%rax)
7053 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7054 ; SSE-NEXT: movaps %xmm0, 1728(%rax)
7055 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7056 ; SSE-NEXT: movaps %xmm0, 1712(%rax)
7057 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7058 ; SSE-NEXT: movaps %xmm0, 1696(%rax)
7059 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7060 ; SSE-NEXT: movaps %xmm0, 1680(%rax)
7061 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7062 ; SSE-NEXT: movaps %xmm0, 1664(%rax)
7063 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7064 ; SSE-NEXT: movaps %xmm0, 1648(%rax)
7065 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7066 ; SSE-NEXT: movaps %xmm0, 1632(%rax)
7067 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7068 ; SSE-NEXT: movaps %xmm0, 1616(%rax)
7069 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7070 ; SSE-NEXT: movaps %xmm0, 1600(%rax)
7071 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7072 ; SSE-NEXT: movaps %xmm0, 1584(%rax)
7073 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7074 ; SSE-NEXT: movaps %xmm0, 1568(%rax)
7075 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7076 ; SSE-NEXT: movaps %xmm0, 1552(%rax)
7077 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7078 ; SSE-NEXT: movaps %xmm0, 1536(%rax)
7079 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7080 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
7081 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7082 ; SSE-NEXT: movaps %xmm0, 1504(%rax)
7083 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7084 ; SSE-NEXT: movaps %xmm0, 1488(%rax)
7085 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7086 ; SSE-NEXT: movaps %xmm0, 1472(%rax)
7087 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7088 ; SSE-NEXT: movaps %xmm0, 1456(%rax)
7089 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7090 ; SSE-NEXT: movaps %xmm0, 1440(%rax)
7091 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7092 ; SSE-NEXT: movaps %xmm0, 1424(%rax)
7093 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7094 ; SSE-NEXT: movaps %xmm0, 1408(%rax)
7095 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7096 ; SSE-NEXT: movaps %xmm0, 1392(%rax)
7097 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7098 ; SSE-NEXT: movaps %xmm0, 1376(%rax)
7099 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7100 ; SSE-NEXT: movaps %xmm0, 1360(%rax)
7101 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7102 ; SSE-NEXT: movaps %xmm0, 1344(%rax)
7103 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7104 ; SSE-NEXT: movaps %xmm0, 1328(%rax)
7105 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7106 ; SSE-NEXT: movaps %xmm0, 1312(%rax)
7107 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7108 ; SSE-NEXT: movaps %xmm0, 1296(%rax)
7109 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7110 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
7111 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7112 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
7113 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7114 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
7115 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7116 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
7117 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7118 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
7119 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7120 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
7121 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7122 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
7123 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7124 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
7125 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7126 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
7127 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7128 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
7129 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7130 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
7131 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7132 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
7133 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7134 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
7135 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7136 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
7137 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7138 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
7139 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7140 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
7141 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7142 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
7143 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7144 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
7145 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7146 ; SSE-NEXT: movaps %xmm0, 992(%rax)
7147 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7148 ; SSE-NEXT: movaps %xmm0, 976(%rax)
7149 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7150 ; SSE-NEXT: movaps %xmm0, 960(%rax)
7151 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7152 ; SSE-NEXT: movaps %xmm0, 944(%rax)
7153 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7154 ; SSE-NEXT: movaps %xmm0, 928(%rax)
7155 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7156 ; SSE-NEXT: movaps %xmm0, 912(%rax)
7157 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7158 ; SSE-NEXT: movaps %xmm0, 896(%rax)
7159 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7160 ; SSE-NEXT: movaps %xmm0, 880(%rax)
7161 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7162 ; SSE-NEXT: movaps %xmm0, 864(%rax)
7163 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7164 ; SSE-NEXT: movaps %xmm0, 848(%rax)
7165 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7166 ; SSE-NEXT: movaps %xmm0, 832(%rax)
7167 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7168 ; SSE-NEXT: movaps %xmm0, 816(%rax)
7169 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7170 ; SSE-NEXT: movaps %xmm0, 800(%rax)
7171 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7172 ; SSE-NEXT: movaps %xmm0, 784(%rax)
7173 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7174 ; SSE-NEXT: movaps %xmm0, 768(%rax)
7175 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7176 ; SSE-NEXT: movaps %xmm0, 752(%rax)
7177 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7178 ; SSE-NEXT: movaps %xmm0, 736(%rax)
7179 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7180 ; SSE-NEXT: movaps %xmm0, 720(%rax)
7181 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7182 ; SSE-NEXT: movaps %xmm0, 704(%rax)
7183 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7184 ; SSE-NEXT: movaps %xmm0, 688(%rax)
7185 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7186 ; SSE-NEXT: movaps %xmm0, 672(%rax)
7187 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7188 ; SSE-NEXT: movaps %xmm0, 656(%rax)
7189 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7190 ; SSE-NEXT: movaps %xmm0, 640(%rax)
7191 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7192 ; SSE-NEXT: movaps %xmm0, 624(%rax)
7193 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7194 ; SSE-NEXT: movaps %xmm0, 608(%rax)
7195 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7196 ; SSE-NEXT: movaps %xmm0, 592(%rax)
7197 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7198 ; SSE-NEXT: movaps %xmm0, 576(%rax)
7199 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7200 ; SSE-NEXT: movaps %xmm0, 560(%rax)
7201 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7202 ; SSE-NEXT: movaps %xmm0, 544(%rax)
7203 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7204 ; SSE-NEXT: movaps %xmm0, 528(%rax)
7205 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7206 ; SSE-NEXT: movaps %xmm0, 512(%rax)
7207 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7208 ; SSE-NEXT: movaps %xmm0, 496(%rax)
7209 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7210 ; SSE-NEXT: movaps %xmm0, 480(%rax)
7211 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7212 ; SSE-NEXT: movaps %xmm0, 464(%rax)
7213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7214 ; SSE-NEXT: movaps %xmm0, 448(%rax)
7215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7216 ; SSE-NEXT: movaps %xmm0, 432(%rax)
7217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7218 ; SSE-NEXT: movaps %xmm0, 416(%rax)
7219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7220 ; SSE-NEXT: movaps %xmm0, 400(%rax)
7221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7222 ; SSE-NEXT: movaps %xmm0, 384(%rax)
7223 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7224 ; SSE-NEXT: movaps %xmm0, 368(%rax)
7225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7226 ; SSE-NEXT: movaps %xmm0, 352(%rax)
7227 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7228 ; SSE-NEXT: movaps %xmm0, 336(%rax)
7229 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7230 ; SSE-NEXT: movaps %xmm0, 320(%rax)
7231 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7232 ; SSE-NEXT: movaps %xmm0, 304(%rax)
7233 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7234 ; SSE-NEXT: movaps %xmm0, 288(%rax)
7235 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7236 ; SSE-NEXT: movaps %xmm0, 272(%rax)
7237 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7238 ; SSE-NEXT: movaps %xmm0, 256(%rax)
7239 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7240 ; SSE-NEXT: movaps %xmm0, 240(%rax)
7241 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7242 ; SSE-NEXT: movaps %xmm0, 224(%rax)
7243 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7244 ; SSE-NEXT: movaps %xmm0, 208(%rax)
7245 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7246 ; SSE-NEXT: movaps %xmm0, 192(%rax)
7247 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7248 ; SSE-NEXT: movaps %xmm0, 176(%rax)
7249 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7250 ; SSE-NEXT: movaps %xmm0, 160(%rax)
7251 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7252 ; SSE-NEXT: movaps %xmm0, 144(%rax)
7253 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7254 ; SSE-NEXT: movaps %xmm0, 128(%rax)
7255 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7256 ; SSE-NEXT: movaps %xmm0, 112(%rax)
7257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7258 ; SSE-NEXT: movaps %xmm0, 96(%rax)
7259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7260 ; SSE-NEXT: movaps %xmm0, 80(%rax)
7261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7262 ; SSE-NEXT: movaps %xmm0, 64(%rax)
7263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7264 ; SSE-NEXT: movaps %xmm0, 48(%rax)
7265 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7266 ; SSE-NEXT: movaps %xmm0, 32(%rax)
7267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7268 ; SSE-NEXT: movaps %xmm0, 16(%rax)
7269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7270 ; SSE-NEXT: movaps %xmm0, (%rax)
7271 ; SSE-NEXT: addq $2712, %rsp # imm = 0xA98
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i64_stride6_vf64:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: subq $3464, %rsp # imm = 0xD88
; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm7
; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovapd (%r8), %ymm0
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm5
; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm6
; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm0[0,1],ymm3[2,3]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm1, %ymm3
; AVX1-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm4
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5],ymm3[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm2[1],xmm5[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vmovapd 64(%r8), %ymm15
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm15[0,1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 64(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vmovapd 96(%r8), %ymm9
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm9[0,1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vmovaps 128(%r8), %ymm2
; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 136(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 128(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX1-ONLY-NEXT: vmovapd 160(%r8), %ymm8
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm8[0,1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX1-ONLY-NEXT: vbroadcastsd 168(%r8), %ymm3
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%r9), %ymm1, %ymm1
; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm2
; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm1
7408 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7409 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7410 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7411 ; AVX1-ONLY-NEXT: vmovapd 192(%r8), %ymm7
7412 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm7[0,1],ymm1[2,3]
7413 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7414 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
7415 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7416 ; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %xmm2
7417 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7418 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %xmm1
7419 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7420 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7421 ; AVX1-ONLY-NEXT: vbroadcastsd 200(%r8), %ymm3
7422 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7423 ; AVX1-ONLY-NEXT: vinsertf128 $1, 192(%r9), %ymm1, %ymm1
7424 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7425 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7426 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm2
7427 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7428 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm1
7429 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7430 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7431 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7432 ; AVX1-ONLY-NEXT: vmovapd 224(%r8), %ymm11
7433 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm11[0,1],ymm1[2,3]
7434 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7435 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
7436 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7437 ; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %xmm2
7438 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7439 ; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %xmm1
7440 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7441 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7442 ; AVX1-ONLY-NEXT: vbroadcastsd 232(%r8), %ymm3
7443 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7444 ; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%r9), %ymm1, %ymm1
7445 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7446 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7447 ; AVX1-ONLY-NEXT: vmovaps 256(%rsi), %xmm2
7448 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7449 ; AVX1-ONLY-NEXT: vmovaps 256(%rdi), %xmm1
7450 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7451 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7452 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7453 ; AVX1-ONLY-NEXT: vmovaps 256(%r8), %ymm2
7454 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7455 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
7456 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7457 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
7458 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7459 ; AVX1-ONLY-NEXT: vmovaps 256(%rcx), %xmm2
7460 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7461 ; AVX1-ONLY-NEXT: vmovaps 256(%rdx), %xmm1
7462 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7463 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7464 ; AVX1-ONLY-NEXT: vbroadcastsd 264(%r8), %ymm3
7465 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7466 ; AVX1-ONLY-NEXT: vinsertf128 $1, 256(%r9), %ymm1, %ymm1
7467 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7468 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7469 ; AVX1-ONLY-NEXT: vmovaps 288(%rsi), %xmm2
7470 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7471 ; AVX1-ONLY-NEXT: vmovaps 288(%rdi), %xmm1
7472 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7473 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7474 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7475 ; AVX1-ONLY-NEXT: vmovaps 288(%r8), %ymm2
7476 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7477 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
7478 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7479 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
7480 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7481 ; AVX1-ONLY-NEXT: vmovaps 288(%rcx), %xmm2
7482 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7483 ; AVX1-ONLY-NEXT: vmovaps 288(%rdx), %xmm1
7484 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7485 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7486 ; AVX1-ONLY-NEXT: vbroadcastsd 296(%r8), %ymm3
7487 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7488 ; AVX1-ONLY-NEXT: vinsertf128 $1, 288(%r9), %ymm1, %ymm1
7489 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7490 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7491 ; AVX1-ONLY-NEXT: vmovaps 320(%rsi), %xmm2
7492 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7493 ; AVX1-ONLY-NEXT: vmovaps 320(%rdi), %xmm1
7494 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7495 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7496 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7497 ; AVX1-ONLY-NEXT: vmovaps 320(%r8), %ymm2
7498 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7499 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
7500 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7501 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
7502 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7503 ; AVX1-ONLY-NEXT: vmovaps 320(%rcx), %xmm2
7504 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7505 ; AVX1-ONLY-NEXT: vmovaps 320(%rdx), %xmm1
7506 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7507 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7508 ; AVX1-ONLY-NEXT: vbroadcastsd 328(%r8), %ymm3
7509 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7510 ; AVX1-ONLY-NEXT: vinsertf128 $1, 320(%r9), %ymm1, %ymm1
7511 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7512 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7513 ; AVX1-ONLY-NEXT: vmovaps 352(%rsi), %xmm2
7514 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7515 ; AVX1-ONLY-NEXT: vmovaps 352(%rdi), %xmm1
7516 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7517 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7518 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7519 ; AVX1-ONLY-NEXT: vmovaps 352(%r8), %ymm2
7520 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7521 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
7522 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7523 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5,6,7]
7524 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7525 ; AVX1-ONLY-NEXT: vmovaps 352(%rcx), %xmm2
7526 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7527 ; AVX1-ONLY-NEXT: vmovaps 352(%rdx), %xmm1
7528 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7529 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7530 ; AVX1-ONLY-NEXT: vbroadcastsd 360(%r8), %ymm3
7531 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7532 ; AVX1-ONLY-NEXT: vinsertf128 $1, 352(%r9), %ymm1, %ymm1
7533 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7534 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7535 ; AVX1-ONLY-NEXT: vmovaps 384(%rsi), %xmm2
7536 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7537 ; AVX1-ONLY-NEXT: vmovaps 384(%rdi), %xmm1
7538 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7539 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7540 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7541 ; AVX1-ONLY-NEXT: vmovapd 384(%r8), %ymm12
7542 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm12[0,1],ymm1[2,3]
7543 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7544 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
7545 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7546 ; AVX1-ONLY-NEXT: vmovaps 384(%rcx), %xmm2
7547 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7548 ; AVX1-ONLY-NEXT: vmovaps 384(%rdx), %xmm1
7549 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7550 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7551 ; AVX1-ONLY-NEXT: vbroadcastsd 392(%r8), %ymm3
7552 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7553 ; AVX1-ONLY-NEXT: vinsertf128 $1, 384(%r9), %ymm1, %ymm1
7554 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7555 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7556 ; AVX1-ONLY-NEXT: vmovaps 416(%rsi), %xmm2
7557 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7558 ; AVX1-ONLY-NEXT: vmovaps 416(%rdi), %xmm1
7559 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7560 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7561 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7562 ; AVX1-ONLY-NEXT: vmovapd 416(%r8), %ymm13
7563 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm13[0,1],ymm1[2,3]
7564 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7565 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
7566 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7567 ; AVX1-ONLY-NEXT: vmovaps 416(%rcx), %xmm2
7568 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7569 ; AVX1-ONLY-NEXT: vmovaps 416(%rdx), %xmm1
7570 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7571 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7572 ; AVX1-ONLY-NEXT: vbroadcastsd 424(%r8), %ymm3
7573 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7574 ; AVX1-ONLY-NEXT: vinsertf128 $1, 416(%r9), %ymm1, %ymm1
7575 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7576 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7577 ; AVX1-ONLY-NEXT: vmovaps 448(%rsi), %xmm2
7578 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7579 ; AVX1-ONLY-NEXT: vmovaps 448(%rdi), %xmm1
7580 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7581 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7582 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7583 ; AVX1-ONLY-NEXT: vmovapd 448(%r8), %ymm14
7584 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm14[0,1],ymm1[2,3]
7585 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
7586 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3]
7587 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7588 ; AVX1-ONLY-NEXT: vmovaps 448(%rcx), %xmm2
7589 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7590 ; AVX1-ONLY-NEXT: vmovaps 448(%rdx), %xmm1
7591 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7592 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7593 ; AVX1-ONLY-NEXT: vbroadcastsd 456(%r8), %ymm3
7594 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1,2,3],ymm3[4,5,6,7]
7595 ; AVX1-ONLY-NEXT: vinsertf128 $1, 448(%r9), %ymm1, %ymm1
7596 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
7597 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7598 ; AVX1-ONLY-NEXT: vmovaps 480(%rsi), %xmm2
7599 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7600 ; AVX1-ONLY-NEXT: vmovaps 480(%rdi), %xmm1
7601 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7602 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
7603 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm3
7604 ; AVX1-ONLY-NEXT: vmovapd 480(%r8), %ymm5
7605 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3]
7606 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
7607 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm3[0],ymm2[1],ymm3[2,3]
7608 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7609 ; AVX1-ONLY-NEXT: vmovaps 480(%rcx), %xmm1
7610 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7611 ; AVX1-ONLY-NEXT: vmovaps 480(%rdx), %xmm2
7612 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7613 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
7614 ; AVX1-ONLY-NEXT: vbroadcastsd 488(%r8), %ymm3
7615 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
7616 ; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%r9), %ymm2, %ymm2
7617 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
7618 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7619 ; AVX1-ONLY-NEXT: vmovapd (%rdi), %ymm2
7620 ; AVX1-ONLY-NEXT: vmovapd (%rsi), %ymm3
7621 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
7622 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm0[2,3],ymm2[2,3]
7623 ; AVX1-ONLY-NEXT: vmovapd (%r9), %ymm4
7624 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
7625 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm2[0],ymm3[0],ymm2[2],ymm3[3]
7626 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7627 ; AVX1-ONLY-NEXT: vmovapd 32(%rdi), %ymm2
7628 ; AVX1-ONLY-NEXT: vmovapd 32(%rsi), %ymm3
7629 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
7630 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm0 # 32-byte Folded Reload
7631 ; AVX1-ONLY-NEXT: # ymm0 = mem[2,3],ymm2[2,3]
7632 ; AVX1-ONLY-NEXT: vmovapd 32(%r9), %ymm6
7633 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm6[2,3],ymm3[2,3]
7634 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[3]
7635 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7636 ; AVX1-ONLY-NEXT: vmovapd 64(%rdi), %ymm0
7637 ; AVX1-ONLY-NEXT: vmovapd 64(%rsi), %ymm3
7638 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm3[1],ymm0[3],ymm3[3]
7639 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm15[2,3],ymm0[2,3]
7640 ; AVX1-ONLY-NEXT: vmovapd 64(%r9), %ymm15
7641 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm15[2,3],ymm3[2,3]
7642 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[3]
7643 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7644 ; AVX1-ONLY-NEXT: vmovapd 96(%rdi), %ymm3
7645 ; AVX1-ONLY-NEXT: vmovapd 96(%rsi), %ymm0
7646 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
7647 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm9 = ymm9[2,3],ymm3[2,3]
7648 ; AVX1-ONLY-NEXT: vmovapd 96(%r9), %ymm3
7649 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm3[2,3],ymm0[2,3]
7650 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[2],ymm0[3]
7651 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7652 ; AVX1-ONLY-NEXT: vmovapd 128(%rdi), %ymm9
7653 ; AVX1-ONLY-NEXT: vmovapd 128(%rsi), %ymm0
7654 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm9 = ymm9[1],ymm0[1],ymm9[3],ymm0[3]
7655 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm1 # 32-byte Folded Reload
7656 ; AVX1-ONLY-NEXT: # ymm1 = mem[2,3],ymm9[2,3]
7657 ; AVX1-ONLY-NEXT: vmovapd 128(%r9), %ymm9
7658 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm9[2,3],ymm0[2,3]
7659 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
7660 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7661 ; AVX1-ONLY-NEXT: vmovapd 160(%rdi), %ymm0
7662 ; AVX1-ONLY-NEXT: vmovapd 160(%rsi), %ymm1
7663 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7664 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm8[2,3],ymm0[2,3]
7665 ; AVX1-ONLY-NEXT: vmovapd 160(%r9), %ymm8
7666 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm8[2,3],ymm1[2,3]
7667 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7668 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7669 ; AVX1-ONLY-NEXT: vmovapd 192(%rdi), %ymm0
7670 ; AVX1-ONLY-NEXT: vmovapd 192(%rsi), %ymm1
7671 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7672 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm7[2,3],ymm0[2,3]
7673 ; AVX1-ONLY-NEXT: vmovapd 192(%r9), %ymm7
7674 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm7[2,3],ymm1[2,3]
7675 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7676 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7677 ; AVX1-ONLY-NEXT: vmovapd 224(%rdi), %ymm0
7678 ; AVX1-ONLY-NEXT: vmovapd 224(%rsi), %ymm1
7679 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7680 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm11[2,3],ymm0[2,3]
7681 ; AVX1-ONLY-NEXT: vmovapd 224(%r9), %ymm10
7682 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm10[2,3],ymm1[2,3]
7683 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7684 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7685 ; AVX1-ONLY-NEXT: vmovapd 256(%rdi), %ymm1
7686 ; AVX1-ONLY-NEXT: vmovapd 256(%rsi), %ymm0
7687 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
7688 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
7689 ; AVX1-ONLY-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
7690 ; AVX1-ONLY-NEXT: vmovapd 256(%r9), %ymm11
7691 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm11[2,3],ymm0[2,3]
7692 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
7693 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7694 ; AVX1-ONLY-NEXT: vmovapd 288(%rdi), %ymm0
7695 ; AVX1-ONLY-NEXT: vmovapd 288(%rsi), %ymm1
7696 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7697 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7698 ; AVX1-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
7699 ; AVX1-ONLY-NEXT: vmovapd 288(%r9), %ymm2
7700 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7701 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm2[2,3],ymm1[2,3]
7702 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7703 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7704 ; AVX1-ONLY-NEXT: vmovapd 320(%rdi), %ymm1
7705 ; AVX1-ONLY-NEXT: vmovapd 320(%rsi), %ymm0
7706 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
7707 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
7708 ; AVX1-ONLY-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
7709 ; AVX1-ONLY-NEXT: vmovapd 320(%r9), %ymm2
7710 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7711 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[2,3],ymm0[2,3]
7712 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[3]
7713 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7714 ; AVX1-ONLY-NEXT: vmovapd 352(%rdi), %ymm0
7715 ; AVX1-ONLY-NEXT: vmovapd 352(%rsi), %ymm1
7716 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7717 ; AVX1-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
7718 ; AVX1-ONLY-NEXT: # ymm2 = mem[2,3],ymm0[2,3]
7719 ; AVX1-ONLY-NEXT: vmovapd 352(%r9), %ymm0
7720 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7721 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm0[2,3],ymm1[2,3]
7722 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm2[0],ymm1[0],ymm2[2],ymm1[3]
7723 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7724 ; AVX1-ONLY-NEXT: vmovapd 384(%rdi), %ymm0
7725 ; AVX1-ONLY-NEXT: vmovapd 384(%rsi), %ymm1
7726 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7727 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm12[2,3],ymm0[2,3]
7728 ; AVX1-ONLY-NEXT: vmovapd 384(%r9), %ymm12
7729 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm12[2,3],ymm1[2,3]
7730 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7731 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7732 ; AVX1-ONLY-NEXT: vmovapd 416(%rdi), %ymm0
7733 ; AVX1-ONLY-NEXT: vmovapd 416(%rsi), %ymm1
7734 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7735 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm13[2,3],ymm0[2,3]
7736 ; AVX1-ONLY-NEXT: vmovapd 416(%r9), %ymm13
7737 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm13[2,3],ymm1[2,3]
7738 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7739 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7740 ; AVX1-ONLY-NEXT: vmovapd 448(%rdi), %ymm0
7741 ; AVX1-ONLY-NEXT: vmovapd 448(%rsi), %ymm1
7742 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7743 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm14[2,3],ymm0[2,3]
7744 ; AVX1-ONLY-NEXT: vmovapd 448(%r9), %ymm14
7745 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm14[2,3],ymm1[2,3]
7746 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7747 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7748 ; AVX1-ONLY-NEXT: vmovapd 480(%rdi), %ymm0
7749 ; AVX1-ONLY-NEXT: vmovapd 480(%rsi), %ymm1
7750 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
7751 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm5[2,3],ymm0[2,3]
7752 ; AVX1-ONLY-NEXT: vmovapd 480(%r9), %ymm5
7753 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm5[2,3],ymm1[2,3]
7754 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[3]
7755 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7756 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm0
7757 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7758 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7759 ; AVX1-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm1
7760 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7761 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7762 ; AVX1-ONLY-NEXT: vmovapd 16(%rdx), %xmm0
7763 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7764 ; AVX1-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm1
7765 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7766 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm4[3]
7767 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7768 ; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm0
7769 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7770 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7771 ; AVX1-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm1
7772 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7773 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7774 ; AVX1-ONLY-NEXT: vmovapd 48(%rdx), %xmm0
7775 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7776 ; AVX1-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm1
7777 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7778 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm6[3]
7779 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7780 ; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm0
7781 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7782 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7783 ; AVX1-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm1
7784 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7785 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7786 ; AVX1-ONLY-NEXT: vmovapd 80(%rdx), %xmm0
7787 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7788 ; AVX1-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm1
7789 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7790 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm15[3]
7791 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7792 ; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm0
7793 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7794 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7795 ; AVX1-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm1
7796 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7797 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7798 ; AVX1-ONLY-NEXT: vmovapd 112(%rdx), %xmm0
7799 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7800 ; AVX1-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm1
7801 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7802 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3]
7803 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7804 ; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm0
7805 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7806 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7807 ; AVX1-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm1
7808 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7809 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7810 ; AVX1-ONLY-NEXT: vmovapd 144(%rdx), %xmm0
7811 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7812 ; AVX1-ONLY-NEXT: vbroadcastsd 152(%r8), %ymm1
7813 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7814 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm9[3]
7815 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7816 ; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm0
7817 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7818 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7819 ; AVX1-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm1
7820 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7821 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7822 ; AVX1-ONLY-NEXT: vmovapd 176(%rdx), %xmm0
7823 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7824 ; AVX1-ONLY-NEXT: vbroadcastsd 184(%r8), %ymm1
7825 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7826 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm8[3]
7827 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7828 ; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm0
7829 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7830 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7831 ; AVX1-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm1
7832 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7833 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7834 ; AVX1-ONLY-NEXT: vmovapd 208(%rdx), %xmm0
7835 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7836 ; AVX1-ONLY-NEXT: vbroadcastsd 216(%r8), %ymm1
7837 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7838 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm7[3]
7839 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7840 ; AVX1-ONLY-NEXT: vmovaps 240(%rdi), %xmm0
7841 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7842 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7843 ; AVX1-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm1
7844 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7845 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7846 ; AVX1-ONLY-NEXT: vmovapd 240(%rdx), %xmm0
7847 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7848 ; AVX1-ONLY-NEXT: vbroadcastsd 248(%r8), %ymm1
7849 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7850 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm10[3]
7851 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7852 ; AVX1-ONLY-NEXT: vmovaps 272(%rdi), %xmm0
7853 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7854 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7855 ; AVX1-ONLY-NEXT: vbroadcastsd 272(%rcx), %ymm1
7856 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7857 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7858 ; AVX1-ONLY-NEXT: vmovapd 272(%rdx), %xmm0
7859 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7860 ; AVX1-ONLY-NEXT: vbroadcastsd 280(%r8), %ymm1
7861 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7862 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm11[3]
7863 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7864 ; AVX1-ONLY-NEXT: vmovaps 304(%rdi), %xmm0
7865 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7866 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7867 ; AVX1-ONLY-NEXT: vbroadcastsd 304(%rcx), %ymm1
7868 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7869 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7870 ; AVX1-ONLY-NEXT: vmovaps 304(%rdx), %xmm0
7871 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7872 ; AVX1-ONLY-NEXT: vbroadcastsd 312(%r8), %ymm1
7873 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7874 ; AVX1-ONLY-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7875 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7876 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7877 ; AVX1-ONLY-NEXT: vmovaps 336(%rdi), %xmm0
7878 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7879 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7880 ; AVX1-ONLY-NEXT: vbroadcastsd 336(%rcx), %ymm1
7881 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7882 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7883 ; AVX1-ONLY-NEXT: vmovaps 336(%rdx), %xmm0
7884 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7885 ; AVX1-ONLY-NEXT: vbroadcastsd 344(%r8), %ymm1
7886 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7887 ; AVX1-ONLY-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7888 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7889 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7890 ; AVX1-ONLY-NEXT: vmovaps 368(%rdi), %xmm0
7891 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7892 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7893 ; AVX1-ONLY-NEXT: vbroadcastsd 368(%rcx), %ymm1
7894 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7895 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7896 ; AVX1-ONLY-NEXT: vmovaps 368(%rdx), %xmm0
7897 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7898 ; AVX1-ONLY-NEXT: vbroadcastsd 376(%r8), %ymm1
7899 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7900 ; AVX1-ONLY-NEXT: vblendps $192, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7901 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7902 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7903 ; AVX1-ONLY-NEXT: vmovaps 400(%rdi), %xmm0
7904 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7905 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7906 ; AVX1-ONLY-NEXT: vbroadcastsd 400(%rcx), %ymm1
7907 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7908 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7909 ; AVX1-ONLY-NEXT: vmovapd 400(%rdx), %xmm0
7910 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7911 ; AVX1-ONLY-NEXT: vbroadcastsd 408(%r8), %ymm1
7912 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7913 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm12[3]
7914 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7915 ; AVX1-ONLY-NEXT: vmovaps 432(%rdi), %xmm0
7916 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7917 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7918 ; AVX1-ONLY-NEXT: vbroadcastsd 432(%rcx), %ymm1
7919 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7920 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7921 ; AVX1-ONLY-NEXT: vmovapd 432(%rdx), %xmm0
7922 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7923 ; AVX1-ONLY-NEXT: vbroadcastsd 440(%r8), %ymm1
7924 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7925 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm13[3]
7926 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7927 ; AVX1-ONLY-NEXT: vmovaps 464(%rdi), %xmm0
7928 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7929 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7930 ; AVX1-ONLY-NEXT: vbroadcastsd 464(%rcx), %ymm1
7931 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7932 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7933 ; AVX1-ONLY-NEXT: vmovapd 464(%rdx), %xmm0
7934 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7935 ; AVX1-ONLY-NEXT: vbroadcastsd 472(%r8), %ymm1
7936 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7937 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm14[3]
7938 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7939 ; AVX1-ONLY-NEXT: vmovaps 496(%rdi), %xmm0
7940 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
7941 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
7942 ; AVX1-ONLY-NEXT: vbroadcastsd 496(%rcx), %ymm1
7943 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7944 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7945 ; AVX1-ONLY-NEXT: vmovapd 496(%rdx), %xmm0
7946 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
7947 ; AVX1-ONLY-NEXT: vbroadcastsd 504(%r8), %ymm1
7948 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
7949 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm5[3]
7950 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7951 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7952 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7953 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7954 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7955 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7956 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7957 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7958 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7959 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7960 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7961 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7962 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7963 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7964 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7965 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7966 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7967 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7968 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7969 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7970 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7971 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7972 ; AVX1-ONLY-NEXT: vunpcklpd (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
7973 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7974 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7975 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7976 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7977 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7978 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7979 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7980 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7981 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7982 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7983 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7984 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7985 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7986 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7987 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7988 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7989 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7990 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7991 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7992 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7993 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7994 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7995 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7996 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7997 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
7998 ; AVX1-ONLY-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
7999 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8000 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8001 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
8002 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8003 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8004 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8005 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
8006 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8007 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8008 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8009 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
8010 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8011 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8012 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8013 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
8014 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8015 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8016 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm15 # 16-byte Folded Reload
8017 ; AVX1-ONLY-NEXT: # xmm15 = xmm0[0],mem[0]
8018 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8019 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm14 # 16-byte Folded Reload
8020 ; AVX1-ONLY-NEXT: # xmm14 = xmm0[0],mem[0]
8021 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8022 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm13 # 16-byte Folded Reload
8023 ; AVX1-ONLY-NEXT: # xmm13 = xmm0[0],mem[0]
8024 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8025 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm12 # 16-byte Folded Reload
8026 ; AVX1-ONLY-NEXT: # xmm12 = xmm0[0],mem[0]
8027 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8028 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm11 # 16-byte Folded Reload
8029 ; AVX1-ONLY-NEXT: # xmm11 = xmm0[0],mem[0]
8030 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8031 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm10 # 16-byte Folded Reload
8032 ; AVX1-ONLY-NEXT: # xmm10 = xmm0[0],mem[0]
8033 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8034 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
8035 ; AVX1-ONLY-NEXT: # xmm9 = xmm0[0],mem[0]
8036 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8037 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm8 # 16-byte Folded Reload
8038 ; AVX1-ONLY-NEXT: # xmm8 = xmm0[0],mem[0]
8039 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8040 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
8041 ; AVX1-ONLY-NEXT: # xmm7 = xmm0[0],mem[0]
8042 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8043 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
8044 ; AVX1-ONLY-NEXT: # xmm6 = xmm0[0],mem[0]
8045 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8046 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
8047 ; AVX1-ONLY-NEXT: # xmm5 = xmm0[0],mem[0]
8048 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8049 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
8050 ; AVX1-ONLY-NEXT: # xmm4 = xmm0[0],mem[0]
8051 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8052 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
8053 ; AVX1-ONLY-NEXT: # xmm3 = xmm0[0],mem[0]
8054 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8055 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
8056 ; AVX1-ONLY-NEXT: # xmm2 = xmm0[0],mem[0]
8057 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8058 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
8059 ; AVX1-ONLY-NEXT: # xmm1 = xmm0[0],mem[0]
8060 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8061 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8062 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
8063 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
8064 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 16(%rax)
8065 ; AVX1-ONLY-NEXT: vmovaps %xmm1, (%rax)
8066 ; AVX1-ONLY-NEXT: vmovaps %xmm2, 2320(%rax)
8067 ; AVX1-ONLY-NEXT: vmovaps %xmm3, 2304(%rax)
8068 ; AVX1-ONLY-NEXT: vmovaps %xmm4, 2704(%rax)
8069 ; AVX1-ONLY-NEXT: vmovaps %xmm5, 2688(%rax)
8070 ; AVX1-ONLY-NEXT: vmovaps %xmm6, 2896(%rax)
8071 ; AVX1-ONLY-NEXT: vmovaps %xmm7, 2880(%rax)
8072 ; AVX1-ONLY-NEXT: vmovaps %xmm8, 2512(%rax)
8073 ; AVX1-ONLY-NEXT: vmovaps %xmm9, 2496(%rax)
8074 ; AVX1-ONLY-NEXT: vmovaps %xmm10, 1936(%rax)
8075 ; AVX1-ONLY-NEXT: vmovaps %xmm11, 1920(%rax)
8076 ; AVX1-ONLY-NEXT: vmovaps %xmm12, 2128(%rax)
8077 ; AVX1-ONLY-NEXT: vmovaps %xmm13, 2112(%rax)
8078 ; AVX1-ONLY-NEXT: vmovaps %xmm14, 1744(%rax)
8079 ; AVX1-ONLY-NEXT: vmovaps %xmm15, 1728(%rax)
8080 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8081 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1168(%rax)
8082 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8083 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1152(%rax)
8084 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8085 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1360(%rax)
8086 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8087 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1344(%rax)
8088 ; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
8089 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 976(%rax)
8090 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8091 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 960(%rax)
8092 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8093 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 592(%rax)
8094 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8095 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 576(%rax)
8096 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8097 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
8098 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8099 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 192(%rax)
8100 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8101 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 400(%rax)
8102 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8103 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 384(%rax)
8104 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8105 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 784(%rax)
8106 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8107 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 768(%rax)
8108 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8109 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1552(%rax)
8110 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8111 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 1536(%rax)
8112 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8113 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3008(%rax)
8114 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8115 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2816(%rax)
8116 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8117 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2624(%rax)
8118 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8119 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2432(%rax)
8120 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8121 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2240(%rax)
8122 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8123 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2048(%rax)
8124 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8125 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1856(%rax)
8126 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8127 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1664(%rax)
8128 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8129 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
8130 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8131 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
8132 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8133 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
8134 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8135 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
8136 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8137 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
8138 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8139 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
8140 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8141 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
8142 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8143 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
8144 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8145 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3040(%rax)
8146 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8147 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2976(%rax)
8148 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8149 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2944(%rax)
8150 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8151 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2912(%rax)
8152 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8153 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2848(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2784(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2752(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2720(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2656(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2592(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2560(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2528(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2464(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2400(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2368(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2336(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2272(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2208(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2176(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2144(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2080(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 2016(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1984(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1952(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1888(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1824(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1792(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1760(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1696(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1632(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1600(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1568(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
; AVX1-ONLY-NEXT: addq $3464, %rsp # imm = 0xD88
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i64_stride6_vf64:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: subq $2968, %rsp # imm = 0xB98
; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm3
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm4
; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm7
; AVX2-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm5
; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm6
; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm4[0,1],ymm2[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm4
; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 8(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = xmm0[0,0]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm5[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm3[0,1],ymm2[0,1]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm1
; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
; AVX2-ONLY-NEXT: vbroadcastsd 40(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm2[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 72(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 104(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 128(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 136(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 160(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 168(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 192(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 200(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 224(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 232(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 256(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 264(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 288(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 296(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 320(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 328(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 352(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rdx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 360(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %xmm2
; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 384(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rcx), %xmm0
; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rdx), %xmm15
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm0[1]
; AVX2-ONLY-NEXT: vbroadcastsd 392(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%rsi), %xmm12
; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %xmm13
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm12[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 416(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 416(%rcx), %xmm10
; AVX2-ONLY-NEXT: vmovaps 416(%rdx), %xmm11
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm11[1],xmm10[1]
; AVX2-ONLY-NEXT: vbroadcastsd 424(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 448(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %xmm8
; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %xmm9
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm9[1],xmm8[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 448(%r9), %xmm1
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm1[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 448(%rcx), %xmm6
; AVX2-ONLY-NEXT: vmovaps 448(%rdx), %xmm7
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm6[1]
; AVX2-ONLY-NEXT: vbroadcastsd 456(%r8), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 480(%r8), %ymm1
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 480(%rsi), %xmm4
; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %xmm5
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm5[1],xmm4[1]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[0,1],ymm0[0,1]
; AVX2-ONLY-NEXT: vmovaps 480(%r9), %xmm0
; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = xmm0[0,0]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 480(%rcx), %xmm2
; AVX2-ONLY-NEXT: vmovaps 480(%rdx), %xmm3
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm2[1]
; AVX2-ONLY-NEXT: vbroadcastsd 488(%r8), %ymm14
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4,5,6,7]
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm0
; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm1
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 16(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 24(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm12 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 48(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 56(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm13 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 80(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 88(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 112(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 120(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 144(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 152(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 176(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 176(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 184(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 208(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 208(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 216(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 240(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 240(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 248(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 256(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 272(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 272(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 280(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 288(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 288(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 304(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 304(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 312(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 320(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 336(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 336(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 344(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 352(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 352(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 368(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 368(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 376(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 384(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 400(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 400(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 408(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm15 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 416(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 416(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm3[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 432(%rcx), %ymm4
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 432(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 440(%r8), %ymm1
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm10 = ymm0[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %ymm0
; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 448(%rdx), %ymm2
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 464(%rcx), %ymm5
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5],ymm5[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm0 = mem[2,3],ymm0[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 464(%r9), %ymm1
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 472(%r8), %ymm2
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm0[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %ymm1
; AVX2-ONLY-NEXT: vmovaps 480(%rsi), %ymm2
; AVX2-ONLY-NEXT: vmovaps 480(%rdx), %ymm4
; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm7[2,3],ymm4[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 496(%rcx), %ymm8
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm8[6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
; AVX2-ONLY-NEXT: vperm2f128 $19, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-ONLY-NEXT: # ymm1 = mem[2,3],ymm1[2,3]
; AVX2-ONLY-NEXT: vbroadcastsd 496(%r9), %ymm2
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7]
; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],mem[1],ymm4[3],mem[3]
; AVX2-ONLY-NEXT: vbroadcastsd 504(%r8), %ymm2
; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm1[2,3],ymm2[2,3]
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm12[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX2-ONLY-NEXT: vmovaps %ymm0, 3040(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm3, 3008(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm5, 2976(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2880(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm6, 2848(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm9, 2816(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm11, 2784(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2688(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm10, 2656(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2624(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2592(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2496(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm15, 2464(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2432(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2400(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2304(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm1, 2272(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2240(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2208(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2112(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm2, 2080(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2048(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 2016(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1920(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm4, 1888(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1856(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1824(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1728(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm7, 1696(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1664(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1632(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1536(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm8, 1504(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm12, 1312(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm13, 1120(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
; AVX2-ONLY-NEXT: vmovaps %ymm14, 928(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
9105 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9106 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
9107 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9108 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
9109 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9110 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
9111 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9112 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
9113 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9114 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
9115 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9116 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
9117 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9118 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
9119 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9120 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
9121 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9122 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
9123 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9124 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
9125 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9126 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
9127 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9128 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2944(%rax)
9129 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9130 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2912(%rax)
9131 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9132 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2752(%rax)
9133 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9134 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2720(%rax)
9135 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9136 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2560(%rax)
9137 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9138 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2528(%rax)
9139 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9140 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2368(%rax)
9141 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9142 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2336(%rax)
9143 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9144 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2176(%rax)
9145 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9146 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2144(%rax)
9147 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9148 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1984(%rax)
9149 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9150 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1952(%rax)
9151 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9152 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1792(%rax)
9153 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9154 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1760(%rax)
9155 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9156 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1600(%rax)
9157 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9158 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1568(%rax)
9159 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9160 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
9161 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9162 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
9163 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9164 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
9165 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9166 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
9167 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9168 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
9169 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9170 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
9171 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9172 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
9173 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9174 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
9175 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9176 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
9177 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9178 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
9179 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9180 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
9181 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9182 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
9183 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9184 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
9185 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9186 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
9187 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9188 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
9189 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9190 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
9191 ; AVX2-ONLY-NEXT: addq $2968, %rsp # imm = 0xB98
9192 ; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-ONLY-SLOW-LABEL: store_i64_stride6_vf64:
; AVX512F-ONLY-SLOW: # %bb.0:
; AVX512F-ONLY-SLOW-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512F-ONLY-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512F-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512F-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512F-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512F-ONLY-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512F-ONLY-SLOW-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: movb $12, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movb $48, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: movb $16, %al
; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 3008(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm8, 2944(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 2880(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 2816(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm6, 2752(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 2624(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm6, 2560(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 2496(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2432(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm5, 2368(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm5, 2176(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 2048(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm4, 1984(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 1856(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1728(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1664(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm3, 1600(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1472(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 1344(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 1280(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm2, 1216(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 1088(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 960(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 896(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm1, 832(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 704(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm1, 640(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 576(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 512(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 192(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 2304(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 1920(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 1536(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, (%rax)
; AVX512F-ONLY-SLOW-NEXT: addq $3400, %rsp # imm = 0xD48
; AVX512F-ONLY-SLOW-NEXT: vzeroupper
; AVX512F-ONLY-SLOW-NEXT: retq
;
; AVX512F-ONLY-FAST-LABEL: store_i64_stride6_vf64:
9873 ; AVX512F-ONLY-FAST: # %bb.0:
9874 ; AVX512F-ONLY-FAST-NEXT: subq $3400, %rsp # imm = 0xD48
9875 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm13
9876 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm12
9877 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm11
9878 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
9879 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm9
9880 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm8
9881 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm7
9882 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm6
9883 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm0
9884 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm1
9885 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm2
9886 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm30
9887 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm27
9888 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm24
9889 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm22
9890 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm21
9891 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
9892 ; AVX512F-ONLY-FAST-NEXT: # ymm3 = mem[0,1,0,1]
9893 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
9894 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
9895 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9896 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
9897 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
9898 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9899 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm4
9900 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
9901 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9902 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm4
9903 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
9904 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9905 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
9906 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
9907 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9908 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4
9909 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
9910 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9911 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm4
9912 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
9913 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9914 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
9915 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9916 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
9917 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
9918 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
9919 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
9920 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9921 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
9922 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
9923 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
9924 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
9925 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
9926 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9927 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
9928 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
9929 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9930 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
9931 ; AVX512F-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9932 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
9933 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
9934 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9935 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
9936 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9937 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
9938 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9939 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
9940 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
9941 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9942 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
9943 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
9944 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9945 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
9946 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9947 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
9948 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
9949 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9950 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
9951 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
9952 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9953 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
9954 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
9955 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9956 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
9957 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9958 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
9959 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
9960 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9961 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
9962 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
9963 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9964 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
9965 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
9966 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9967 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
9968 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9969 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
9970 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
9971 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9972 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
9973 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
9974 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9975 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
9976 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
9977 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9978 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
9979 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9980 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
9981 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
9982 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9983 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
9984 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
9985 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9986 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
9987 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
9988 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9989 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
9990 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9991 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
9992 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
9993 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9994 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
9995 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
9996 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9997 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
9998 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
9999 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10000 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
10001 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10002 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
10003 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
10004 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
10005 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10006 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
10007 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10008 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
10009 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10010 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm4
10011 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm11
10012 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
10013 ; AVX512F-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
10014 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
10015 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
10016 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
10017 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm2
10018 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm13
10019 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
10020 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
10021 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm3
10022 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm15
10023 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
10024 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
10025 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10026 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm31
10027 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm17
10028 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
10029 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
10030 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10031 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm29
10032 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm18
10033 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
10034 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
10035 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10036 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm25
10037 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
10038 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
10039 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
10040 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5
10041 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm6
10042 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm20
10043 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
10044 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
10045 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10046 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
10047 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
10048 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
10049 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
10050 ; AVX512F-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10051 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm14
10052 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
10053 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
10054 ; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
10055 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
10056 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
10057 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
10058 ; AVX512F-ONLY-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
10059 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
10060 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
10061 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10062 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
10063 ; AVX512F-ONLY-FAST-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
10064 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
10065 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm22
10066 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
10067 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
10068 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
10069 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
10070 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
10071 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10072 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
10073 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm20
10074 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
10075 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm26
10076 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
10077 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
10078 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
10079 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10080 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
10081 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm19
10082 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
10083 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm27
10084 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
10085 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
10086 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
10087 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10088 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
10089 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm18
10090 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
10091 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm9
10092 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
10093 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
10094 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
10095 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10096 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
10097 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm17
10098 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
10099 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm30
10100 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
10101 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
10102 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
10103 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10104 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
10105 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10106 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm15
10107 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
10108 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
10109 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
10110 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
10111 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
10112 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10113 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
10114 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10115 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
10116 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
10117 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
10118 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
10119 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10120 ; AVX512F-ONLY-FAST-NEXT: movb $12, %al
10121 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
10122 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10123 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
10124 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10125 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10126 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
10127 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10128 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10129 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
10130 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
10131 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10132 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10133 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
10134 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10135 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10136 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
10137 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
10138 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10139 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
10140 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10141 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10142 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10143 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
10144 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10145 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
10146 ; AVX512F-ONLY-FAST-NEXT: movb $48, %al
10147 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k2
10148 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10149 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
10150 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
10151 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
10152 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
10153 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
10154 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
10155 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
10156 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
10157 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
10158 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
10159 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
10160 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
10161 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
10162 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
10163 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
10164 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
10165 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
10166 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
10167 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
10168 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
10169 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
10170 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
10171 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
10172 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
10173 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
10174 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10175 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
10176 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
10177 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
10178 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm5
10179 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
10180 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
10181 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10182 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm3
10183 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
10184 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10185 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm9
10186 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
10187 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10188 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm11
10189 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
10190 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10191 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm13
10192 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
10193 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10194 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm14
10195 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
10196 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10197 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm16
10198 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
10199 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10200 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm12
10201 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
10202 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
10203 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
10204 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
10205 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
10206 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
10207 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
10208 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10209 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
10210 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm24
10211 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
10212 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
10213 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
10214 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
10215 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10216 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
10217 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10218 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
10219 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm2
10220 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
10221 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10222 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
10223 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
10224 ; AVX512F-ONLY-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
10225 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm2
10226 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
10227 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10228 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
10229 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
10230 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
10231 ; AVX512F-ONLY-FAST-NEXT: movb $16, %al
10232 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k2
10233 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
10234 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10235 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
10236 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
10237 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10238 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
10239 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
10240 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10241 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm3
10242 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
10243 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10244 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
10245 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
10246 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
10247 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10248 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
10249 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10250 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
10251 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10252 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
10253 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10254 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
10255 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
10256 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10257 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
10258 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
10259 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
10260 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
10261 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10262 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
10263 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10264 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
10265 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10266 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %ymm0
10267 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10268 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10269 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
10270 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
10271 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
10272 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10273 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
10274 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10275 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
10276 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10277 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
10278 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10279 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rdi), %ymm0
10280 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10281 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10282 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
10283 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10284 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
10285 ; AVX512F-ONLY-FAST-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
10286 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
10287 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10288 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
10289 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
10290 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rdi), %ymm0
10291 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10292 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10293 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
10294 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10295 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
10296 ; AVX512F-ONLY-FAST-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
10297 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10298 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
10299 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10300 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
10301 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
10302 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rdi), %ymm0
10303 ; AVX512F-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
10304 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10305 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
10306 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
10307 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm11
10308 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
10309 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10310 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
10311 ; AVX512F-ONLY-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
10312 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10313 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
10314 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10315 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
10316 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
10317 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
10318 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
10319 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm6
10320 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
10321 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10322 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
10323 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10324 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm7
10325 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
10326 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
10327 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10328 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm2
10329 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10330 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
10331 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10332 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm3
10333 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
10334 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
10335 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10336 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm4
10337 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10338 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
10339 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10340 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm5
10341 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10342 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
10343 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10344 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm8
10345 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10346 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
10347 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10348 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm9
10349 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
10350 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
10351 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
10352 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
10353 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
10354 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10355 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
10356 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
10357 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10358 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
10359 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10360 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
10361 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10362 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
10363 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10364 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10365 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
10366 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10367 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10368 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
10369 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10370 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
10371 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10372 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm10
10373 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10374 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10375 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
10376 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
10377 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm10
10378 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10379 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10380 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
10381 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
10382 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm10
10383 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10384 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10385 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
10386 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
10387 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm10
10388 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10389 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10390 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
10391 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
10392 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %xmm10
10393 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10394 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10395 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
10396 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
10397 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %xmm10
10398 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10399 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10400 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10401 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
10402 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %xmm10
10403 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10404 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10405 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10406 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
10407 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %xmm10
10408 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
10409 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
10410 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
10411 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
10412 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
10413 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
10414 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10415 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
10416 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
10417 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10418 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
10419 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
10420 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10421 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
10422 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
10423 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
10424 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
10425 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
10426 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
10427 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
10428 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
10429 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
10430 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
10431 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
10432 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
10433 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10434 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
10435 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
10436 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
10437 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
10438 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10439 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
10440 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
10441 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
10442 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
10443 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
10444 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
10445 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
10446 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
10447 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
10448 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
10449 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
10450 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
10451 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
10452 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
10453 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
10454 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
10455 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
10456 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
10457 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10458 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
10459 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10460 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
10461 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10462 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
10463 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
10464 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
10465 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
10466 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
10467 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
10468 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10469 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
10470 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
10471 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
10472 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
10473 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
10474 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
10475 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
10476 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
10477 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
10478 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm8, 2944(%rax)
10479 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 2880(%rax)
10480 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 2816(%rax)
10481 ; AVX512F-ONLY-FAST-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
10482 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm6, 2752(%rax)
10483 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 2624(%rax)
10484 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
10485 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm6, 2560(%rax)
10486 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 2496(%rax)
10487 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 2432(%rax)
10488 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10489 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm5, 2368(%rax)
10490 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 2240(%rax)
10491 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
10492 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm5, 2176(%rax)
10493 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 2112(%rax)
10494 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 2048(%rax)
10495 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
10496 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm4, 1984(%rax)
10497 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 1856(%rax)
10498 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10499 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
10500 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 1728(%rax)
10501 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 1664(%rax)
10502 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
10503 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm3, 1600(%rax)
10504 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 1472(%rax)
10505 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10506 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
10507 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1344(%rax)
10508 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1280(%rax)
10509 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
10510 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm2, 1216(%rax)
10511 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 1088(%rax)
10512 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10513 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
10514 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 960(%rax)
10515 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 896(%rax)
10516 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10517 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm1, 832(%rax)
10518 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
10519 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
10520 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm1, 640(%rax)
10521 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 576(%rax)
10522 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 512(%rax)
10523 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10524 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
10525 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10526 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 320(%rax)
10527 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10528 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
10529 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 192(%rax)
10530 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10531 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
10532 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10533 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
10534 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 2688(%rax)
10535 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 2304(%rax)
10536 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 1920(%rax)
10537 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 1536(%rax)
10538 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1152(%rax)
10539 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10540 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 768(%rax)
10541 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10542 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
10543 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
10544 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, (%rax)
10545 ; AVX512F-ONLY-FAST-NEXT: addq $3400, %rsp # imm = 0xD48
10546 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
10547 ; AVX512F-ONLY-FAST-NEXT: retq
;
; AVX512DQ-SLOW-LABEL: store_i64_stride6_vf64:
; AVX512DQ-SLOW: # %bb.0:
; AVX512DQ-SLOW-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512DQ-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512DQ-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512DQ-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512DQ-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512DQ-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512DQ-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512DQ-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512DQ-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512DQ-SLOW-NEXT: # ymm23 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm22
10743 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
10744 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
10745 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
10746 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
10747 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
10748 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10749 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
10750 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm20
10751 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
10752 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm26
10753 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
10754 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
10755 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
10756 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10757 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
10758 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm19
10759 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
10760 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm27
10761 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
10762 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
10763 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
10764 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10765 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
10766 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18
10767 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
10768 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm9
10769 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
10770 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
10771 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
10772 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10773 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: movb $12, %al
; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512DQ-SLOW-NEXT: movb $48, %al
; AVX512DQ-SLOW-NEXT: kmovw %eax, %k2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: movb $16, %al
; AVX512DQ-SLOW-NEXT: kmovw %eax, %k2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rdx), %xmm10
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 3008(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm8, 2944(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 2880(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, 2816(%rax)
; AVX512DQ-SLOW-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm6, 2752(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, 2624(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm6, 2560(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 2496(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, 2432(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm5, 2368(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm5, 2176(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, 2048(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm4, 1984(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 1856(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 1728(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 1664(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm3, 1600(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 1472(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 1344(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 1280(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm2, 1216(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 1088(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 960(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 896(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm1, 832(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, 704(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm1, 640(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 576(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 512(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, 192(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 2304(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 1920(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, 1536(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, (%rax)
; AVX512DQ-SLOW-NEXT: addq $3400, %rsp # imm = 0xD48
; AVX512DQ-SLOW-NEXT: vzeroupper
; AVX512DQ-SLOW-NEXT: retq
;
; AVX512DQ-FAST-LABEL: store_i64_stride6_vf64:
; AVX512DQ-FAST: # %bb.0:
; AVX512DQ-FAST-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512DQ-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512DQ-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512DQ-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512DQ-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512DQ-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512DQ-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQ-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512DQ-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512DQ-FAST-NEXT: # ymm23 = mem[0,1,0,1]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512DQ-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512DQ-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: movb $12, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512DQ-FAST-NEXT: movb $48, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k2
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512DQ-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512DQ-FAST-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-FAST-NEXT: movb $16, %al
; AVX512DQ-FAST-NEXT: kmovw %eax, %k2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11589 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
11590 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
11591 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11592 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
11593 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
11594 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11595 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdi), %ymm3
11596 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
11597 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11598 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
11599 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
11600 ; AVX512DQ-FAST-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
11601 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11602 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
11603 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11604 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
11605 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11606 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
11607 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11608 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
11609 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
11610 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11611 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
11612 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
11613 ; AVX512DQ-FAST-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
11614 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
11615 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11616 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
11617 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11618 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
11619 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11620 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdi), %ymm0
11621 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11622 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11623 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
11624 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
11625 ; AVX512DQ-FAST-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
11626 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11627 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
11628 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11629 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
11630 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11631 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
11632 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11633 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rdi), %ymm0
11634 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11635 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11636 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
11637 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11638 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
11639 ; AVX512DQ-FAST-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
11640 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
11641 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11642 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
11643 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
11644 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rdi), %ymm0
11645 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11646 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11647 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
11648 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11649 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
11650 ; AVX512DQ-FAST-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
11651 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11652 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
11653 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11654 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
11655 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
11656 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rdi), %ymm0
11657 ; AVX512DQ-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
11658 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11659 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
11660 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
11661 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm11
11662 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
11663 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11664 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
11665 ; AVX512DQ-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
11666 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11667 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
11668 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11669 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
11670 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
11671 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
11672 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
11673 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm6
11674 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
11675 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11676 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
11677 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11678 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm7
11679 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11680 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
11681 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11682 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r9), %zmm2
11683 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11684 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
11685 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11686 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %zmm3
11687 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
11688 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
11689 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11690 ; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r9), %zmm4
11691 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
11692 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
11693 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11694 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r9), %zmm5
11695 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11696 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
11697 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11698 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r9), %zmm8
11699 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11700 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
11701 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11702 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r9), %zmm9
11703 ; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
11704 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
11705 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
11706 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
11707 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
11708 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11709 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
11710 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
11711 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11712 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
11713 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11714 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
11715 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11716 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
11717 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11718 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11719 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
11720 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11721 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11722 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
11723 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11724 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
11725 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11726 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm10
11727 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11728 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11729 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
11730 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
11731 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm10
11732 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11733 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11734 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
11735 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
11736 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdx), %xmm10
11737 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11738 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11739 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
11740 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
11741 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdx), %xmm10
11742 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11743 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11744 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
11745 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
11746 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdx), %xmm10
11747 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11748 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11749 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
11750 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
11751 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rdx), %xmm10
11752 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11753 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11754 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11755 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
11756 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rdx), %xmm10
11757 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11758 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11759 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11760 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
11761 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rdx), %xmm10
11762 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
11763 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
11764 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
11765 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
11766 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
11767 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
11768 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11769 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
11770 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
11771 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11772 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
11773 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
11774 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11775 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
11776 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
11777 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
11778 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
11779 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
11780 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
11781 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
11782 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
11783 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
11784 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
11785 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
11786 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
11787 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11788 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
11789 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
11790 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
11791 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
11792 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11793 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
11794 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
11795 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
11796 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
11797 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
11798 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
11799 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
11800 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
11801 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
11802 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
11803 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
11804 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
11805 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
11806 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
11807 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
11808 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
11809 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11810 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
11811 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11812 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
11813 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11814 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
11815 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11816 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
11817 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
11818 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
11819 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
11820 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
11821 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
11822 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
11823 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
11824 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
11825 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
11826 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
11827 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
11828 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
11829 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
11830 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
11831 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
11832 ; AVX512DQ-FAST-NEXT: vmovaps %zmm8, 2944(%rax)
11833 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 2880(%rax)
11834 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, 2816(%rax)
11835 ; AVX512DQ-FAST-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
11836 ; AVX512DQ-FAST-NEXT: vmovaps %zmm6, 2752(%rax)
11837 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, 2624(%rax)
11838 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
11839 ; AVX512DQ-FAST-NEXT: vmovaps %zmm6, 2560(%rax)
11840 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 2496(%rax)
11841 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, 2432(%rax)
11842 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
11843 ; AVX512DQ-FAST-NEXT: vmovaps %zmm5, 2368(%rax)
11844 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, 2240(%rax)
11845 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
11846 ; AVX512DQ-FAST-NEXT: vmovaps %zmm5, 2176(%rax)
11847 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 2112(%rax)
11848 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, 2048(%rax)
11849 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
11850 ; AVX512DQ-FAST-NEXT: vmovaps %zmm4, 1984(%rax)
11851 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 1856(%rax)
11852 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11853 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
11854 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 1728(%rax)
11855 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 1664(%rax)
11856 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
11857 ; AVX512DQ-FAST-NEXT: vmovaps %zmm3, 1600(%rax)
11858 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 1472(%rax)
11859 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11860 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
11861 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 1344(%rax)
11862 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 1280(%rax)
11863 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
11864 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 1216(%rax)
11865 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, 1088(%rax)
11866 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11867 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
11868 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 960(%rax)
11869 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 896(%rax)
11870 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11871 ; AVX512DQ-FAST-NEXT: vmovaps %zmm1, 832(%rax)
11872 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
11873 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11874 ; AVX512DQ-FAST-NEXT: vmovaps %zmm1, 640(%rax)
11875 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, 576(%rax)
11876 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 512(%rax)
11877 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11878 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 448(%rax)
11879 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11880 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 320(%rax)
11881 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11882 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 256(%rax)
11883 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, 192(%rax)
11884 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11885 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 128(%rax)
11886 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11887 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 64(%rax)
11888 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, 2688(%rax)
11889 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 2304(%rax)
11890 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, 1920(%rax)
11891 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, 1536(%rax)
11892 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, 1152(%rax)
11893 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11894 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 768(%rax)
11895 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11896 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 384(%rax)
11897 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
11898 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, (%rax)
11899 ; AVX512DQ-FAST-NEXT: addq $3400, %rsp # imm = 0xD48
11900 ; AVX512DQ-FAST-NEXT: vzeroupper
11901 ; AVX512DQ-FAST-NEXT: retq
;
; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride6_vf64:
; AVX512BW-ONLY-SLOW: # %bb.0:
; AVX512BW-ONLY-SLOW-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512BW-ONLY-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512BW-ONLY-SLOW-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: movb $12, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512BW-ONLY-SLOW-NEXT: movb $48, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: movb $16, %al
; AVX512BW-ONLY-SLOW-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512BW-ONLY-SLOW-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12389 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
12390 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12391 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
12392 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12393 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
12394 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12395 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12396 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
12397 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12398 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12399 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
12400 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12401 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
12402 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12403 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm10
12404 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12405 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12406 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
12407 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
12408 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
12409 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12410 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12411 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
12412 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
12413 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
12414 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12415 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12416 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
12417 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
12418 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
12419 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12420 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12421 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
12422 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
12423 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %xmm10
12424 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12425 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12426 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
12427 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
12428 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %xmm10
12429 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12430 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12431 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12432 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
12433 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %xmm10
12434 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12435 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12436 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12437 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
12438 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %xmm10
12439 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
12440 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12441 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
12442 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
12443 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
12444 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
12445 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12446 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
12447 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
12448 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12449 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
12450 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
12451 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12452 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
12453 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
12454 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
12455 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
12456 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
12457 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
12458 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
12459 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
12460 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
12461 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
12462 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
12463 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
12464 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12465 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
12466 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
12467 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
12468 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
12469 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12470 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
12471 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
12472 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
12473 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
12474 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
12475 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
12476 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
12477 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
12478 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
12479 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
12480 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
12481 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
12482 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
12483 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
12484 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
12485 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
12486 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12487 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
12488 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12489 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
12490 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
12491 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
12492 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12493 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
12494 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
12495 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12496 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
12497 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
12498 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
12499 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12500 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
12501 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
12502 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
12503 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
12504 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
12505 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
12506 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12507 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 3008(%rax)
12508 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
12509 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm8, 2944(%rax)
12510 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 2880(%rax)
12511 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 2816(%rax)
12512 ; AVX512BW-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
12513 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm6, 2752(%rax)
12514 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 2624(%rax)
12515 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
12516 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm6, 2560(%rax)
12517 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 2496(%rax)
12518 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2432(%rax)
12519 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12520 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm5, 2368(%rax)
12521 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
12522 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
12523 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm5, 2176(%rax)
12524 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
12525 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 2048(%rax)
12526 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12527 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm4, 1984(%rax)
12528 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 1856(%rax)
12529 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12530 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
12531 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1728(%rax)
12532 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1664(%rax)
12533 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
12534 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm3, 1600(%rax)
12535 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1472(%rax)
12536 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12537 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
12538 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 1344(%rax)
12539 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 1280(%rax)
12540 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12541 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm2, 1216(%rax)
12542 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 1088(%rax)
12543 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12544 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
12545 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 960(%rax)
12546 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 896(%rax)
12547 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12548 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm1, 832(%rax)
12549 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 704(%rax)
12550 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12551 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm1, 640(%rax)
12552 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 576(%rax)
12553 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 512(%rax)
12554 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12555 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
12556 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12557 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
12558 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12559 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
12560 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 192(%rax)
12561 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12562 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
12563 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12564 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
12565 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
12566 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 2304(%rax)
12567 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 1920(%rax)
12568 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 1536(%rax)
12569 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
12570 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12571 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
12572 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12573 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
12574 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12575 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, (%rax)
12576 ; AVX512BW-ONLY-SLOW-NEXT: addq $3400, %rsp # imm = 0xD48
12577 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
12578 ; AVX512BW-ONLY-SLOW-NEXT: retq

; AVX512BW-ONLY-FAST-LABEL: store_i64_stride6_vf64:
; AVX512BW-ONLY-FAST: # %bb.0:
; AVX512BW-ONLY-FAST-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512BW-ONLY-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512BW-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512BW-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512BW-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512BW-ONLY-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512BW-ONLY-FAST-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: movb $12, %al
; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512BW-ONLY-FAST-NEXT: movb $48, %al
; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm2
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: movb $16, %al
; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k2
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512BW-ONLY-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512BW-ONLY-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %xmm10
; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
13169 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13170 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
13171 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
13172 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13173 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
13174 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
13175 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
13176 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13177 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
13178 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
13179 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
13180 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
13181 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
13182 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
13183 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
13184 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
13185 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
13186 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm8, 2944(%rax)
13187 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 2880(%rax)
13188 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 2816(%rax)
13189 ; AVX512BW-ONLY-FAST-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
13190 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm6, 2752(%rax)
13191 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 2624(%rax)
13192 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
13193 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm6, 2560(%rax)
13194 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 2496(%rax)
13195 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 2432(%rax)
13196 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13197 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm5, 2368(%rax)
13198 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 2240(%rax)
13199 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13200 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm5, 2176(%rax)
13201 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 2112(%rax)
13202 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 2048(%rax)
13203 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13204 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm4, 1984(%rax)
13205 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 1856(%rax)
13206 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13207 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
13208 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 1728(%rax)
13209 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 1664(%rax)
13210 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
13211 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm3, 1600(%rax)
13212 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 1472(%rax)
13213 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13214 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
13215 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1344(%rax)
13216 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1280(%rax)
13217 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13218 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm2, 1216(%rax)
13219 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 1088(%rax)
13220 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13221 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
13222 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 960(%rax)
13223 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 896(%rax)
13224 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13225 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm1, 832(%rax)
13226 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
13227 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
13228 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm1, 640(%rax)
13229 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 576(%rax)
13230 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 512(%rax)
13231 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13232 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
13233 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13234 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 320(%rax)
13235 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13236 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
13237 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 192(%rax)
13238 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13239 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
13240 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13241 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
13242 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 2688(%rax)
13243 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 2304(%rax)
13244 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 1920(%rax)
13245 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 1536(%rax)
13246 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1152(%rax)
13247 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13248 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 768(%rax)
13249 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13250 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
13251 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13252 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, (%rax)
13253 ; AVX512BW-ONLY-FAST-NEXT: addq $3400, %rsp # imm = 0xD48
13254 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
13255 ; AVX512BW-ONLY-FAST-NEXT: retq
;
; AVX512DQBW-SLOW-LABEL: store_i64_stride6_vf64:
; AVX512DQBW-SLOW: # %bb.0:
; AVX512DQBW-SLOW-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512DQBW-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-SLOW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512DQBW-SLOW-NEXT: # ymm23 = mem[0,1,0,1]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: movb $12, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512DQBW-SLOW-NEXT: movb $48, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: movb $16, %al
; AVX512DQBW-SLOW-NEXT: kmovd %eax, %k2
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512DQBW-SLOW-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rdx), %xmm10
; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, 3008(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm8, 2944(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 2880(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, 2816(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm6, 2752(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 2624(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm6, 2560(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 2496(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, 2432(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm5, 2368(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm5, 2176(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, 2048(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm4, 1984(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 1856(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 1728(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, 1664(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm3, 1600(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 1472(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 1344(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 1280(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm2, 1216(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 1088(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 960(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 896(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm1, 832(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, 704(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm1, 640(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, 576(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 512(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, 192(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, 2304(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, 1920(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, 1536(%rax)
; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, (%rax)
; AVX512DQBW-SLOW-NEXT: addq $3400, %rsp # imm = 0xD48
; AVX512DQBW-SLOW-NEXT: vzeroupper
; AVX512DQBW-SLOW-NEXT: retq
13934 ; AVX512DQBW-FAST-LABEL: store_i64_stride6_vf64:
; AVX512DQBW-FAST: # %bb.0:
; AVX512DQBW-FAST-NEXT: subq $3400, %rsp # imm = 0xD48
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdx), %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdx), %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdx), %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdx), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdx), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rcx), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rcx), %zmm30
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rcx), %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rcx), %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rcx), %zmm21
; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
; AVX512DQBW-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
; AVX512DQBW-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
; AVX512DQBW-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
; AVX512DQBW-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
; AVX512DQBW-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm30, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm30, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm30, %zmm3, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm24, %zmm3, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm5, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm14, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm21, %zmm6, %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdi), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rsi), %zmm11
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
; AVX512DQBW-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdi), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rsi), %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rsi), %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdi), %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rsi), %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdi), %zmm29
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rsi), %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdi), %zmm25
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm22
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm22, %zmm7, %zmm28
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm12, %zmm14
; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm16
; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
; AVX512DQBW-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
; AVX512DQBW-FAST-NEXT: # ymm23 = mem[0,1,0,1]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm23, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm23, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm20
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm23, %zmm25
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm19
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm27
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm10, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm23, %zmm29
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm12, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm9
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm31
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm17
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm30
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm10, %zmm30
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm23, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm12
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm10
; AVX512DQBW-FAST-NEXT: vpermi2q %zmm11, %zmm4, %zmm21
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm23, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: movb $12, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm8 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm1 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm28 {%k1}
; AVX512DQBW-FAST-NEXT: movb $48, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k2
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm11 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm23 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm13 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, %zmm22 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm14 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, %zmm20 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm16 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm19 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm24 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm18 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm26 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm27 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm3
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %zmm9
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r8), %zmm11
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm16
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r8), %zmm13
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r8), %zmm14
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r8), %zmm16
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r8), %zmm12
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm15 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm23
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm24
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm19
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm17
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdi), %ymm2
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: movb $16, %al
; AVX512DQBW-FAST-NEXT: kmovd %eax, %k2
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm1 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm1, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdi), %ymm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm3
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdi), %ymm3
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm4 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm1, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm11, %zmm2, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdi), %ymm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm3
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm1, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm2, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rdi), %ymm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm8 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm1, %zmm27
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm25
; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rdi), %ymm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm22
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm20
; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rdi), %ymm0
; AVX512DQBW-FAST-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm18
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm11
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm1, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
; AVX512DQBW-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm0 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm17
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm28 {%k2}
; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm6
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r9), %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r9), %zmm4
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r9), %zmm5
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r9), %zmm8
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r9), %zmm9
; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm10, %zmm23
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm24
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm10, %zmm19
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm26
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm10, %zmm0
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rdx), %xmm10
; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, (%r8), %zmm12, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm26
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm24
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm23
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm19
; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm21
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm29, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm30, %zmm28
; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm31, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm29, %zmm16
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm30, %zmm15
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm31, %zmm14
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm13
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm12
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm11
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm29, %zmm10
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm30, %zmm7
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm2
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm31, %zmm0
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm27
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm4
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm31, %zmm25
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm22
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm30, %zmm5
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm31, %zmm20
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm29, %zmm18
; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm30, %zmm6
; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm31, %zmm17
; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm8, 2944(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 2880(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, 2816(%rax)
; AVX512DQBW-FAST-NEXT: vmovups (%rsp), %zmm6 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm6, 2752(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, 2624(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm6, 2560(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 2496(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, 2432(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm5, 2368(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, 2240(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm5, 2176(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 2112(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, 2048(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm4, 1984(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 1856(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, 1728(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 1664(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm3, 1600(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, 1472(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 1344(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, 1280(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 1216(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, 1088(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, 960(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 896(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm1, 832(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm1, 640(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, 576(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, 512(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 320(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 256(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, 192(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 2304(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, 1920(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, 1536(%rax)
; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, 1152(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 768(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, (%rax)
; AVX512DQBW-FAST-NEXT: addq $3400, %rsp # imm = 0xD48
; AVX512DQBW-FAST-NEXT: vzeroupper
; AVX512DQBW-FAST-NEXT: retq
14610 %in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
14611 %in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
14612 %in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
14613 %in.vec3 = load <64 x i64>, ptr %in.vecptr3, align 64
14614 %in.vec4 = load <64 x i64>, ptr %in.vecptr4, align 64
14615 %in.vec5 = load <64 x i64>, ptr %in.vecptr5, align 64
14616 %1 = shufflevector <64 x i64> %in.vec0, <64 x i64> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14617 %2 = shufflevector <64 x i64> %in.vec2, <64 x i64> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14618 %3 = shufflevector <64 x i64> %in.vec4, <64 x i64> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14619 %4 = shufflevector <128 x i64> %1, <128 x i64> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
14620 %5 = shufflevector <128 x i64> %3, <128 x i64> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
14621 %6 = shufflevector <256 x i64> %4, <256 x i64> %5, <384 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383>
  %interleaved.vec = shufflevector <384 x i64> %6, <384 x i64> poison, <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383>
  store <384 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
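; For reference, a minimal sketch of the same construction at its smallest
; scale, two inputs of two lanes each (illustrative only; %x and %y are
; hypothetical values and this is not one of the generated tests):
;   %cat = shufflevector <2 x i64> %x, <2 x i64> %y, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
;   %ilv = shufflevector <4 x i64> %cat, <4 x i64> poison, <4 x i32> <i32 0, i32 2, i32 1, i32 3>
; which yields <x0, y0, x1, y1>, the stride-2 analogue of the mask above.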
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX2-FAST: {{.*}}
; AVX2-FAST-PERLANE: {{.*}}
; AVX2-SLOW: {{.*}}
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-FAST: {{.*}}
; AVX512BW-SLOW: {{.*}}
; AVX512F-FAST: {{.*}}
; AVX512F-SLOW: {{.*}}
; FALLBACK0: {{.*}}
; FALLBACK1: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}
; FALLBACK2: {{.*}}
; FALLBACK3: {{.*}}
; FALLBACK4: {{.*}}
; FALLBACK5: {{.*}}
; FALLBACK6: {{.*}}
; FALLBACK7: {{.*}}
; FALLBACK8: {{.*}}
; FALLBACK9: {{.*}}