1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by the LoopVectorizer for interleaved stores.
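;
; For reference (an illustrative sketch, not part of the autogenerated
; assertions below): a stride-7 interleaved store group typically comes from
; a scalar loop that writes seven adjacent fields per iteration, e.g.
;
;   for (i = 0; i < n; i++)
;     for (j = 0; j < 7; j++)
;       out[7*i + j] = in[j][i];
;
; The vectorizer widens this into the wide shufflevector-plus-store pattern
; exercised by the functions below.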
18 define void @store_i64_stride7_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i64_stride7_vf2:
; SSE: # %bb.0:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movapd (%rdi), %xmm0
24 ; SSE-NEXT: movapd (%rsi), %xmm1
25 ; SSE-NEXT: movapd (%rdx), %xmm2
26 ; SSE-NEXT: movapd (%rcx), %xmm3
27 ; SSE-NEXT: movapd (%r8), %xmm4
28 ; SSE-NEXT: movapd (%r9), %xmm5
29 ; SSE-NEXT: movapd (%r10), %xmm6
30 ; SSE-NEXT: movapd %xmm0, %xmm7
31 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm1[0]
32 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
33 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
34 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
35 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm5[0]
36 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm6[1]
37 ; SSE-NEXT: movsd {{.*#+}} xmm0 = xmm6[0],xmm0[1]
38 ; SSE-NEXT: movapd %xmm2, 16(%rax)
39 ; SSE-NEXT: movapd %xmm4, 32(%rax)
40 ; SSE-NEXT: movapd %xmm0, 48(%rax)
41 ; SSE-NEXT: movapd %xmm3, 80(%rax)
42 ; SSE-NEXT: movapd %xmm5, 96(%rax)
43 ; SSE-NEXT: movapd %xmm1, 64(%rax)
44 ; SSE-NEXT: movapd %xmm7, (%rax)
; SSE-NEXT: retq
47 ; AVX1-ONLY-LABEL: store_i64_stride7_vf2:
; AVX1-ONLY: # %bb.0:
49 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
50 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
51 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm0
52 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm1
53 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm2
54 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm3
55 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm4
56 ; AVX1-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
57 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm5
58 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm1[0],ymm5[2],ymm1[2]
59 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
60 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm6
61 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm7
62 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[2],ymm0[3]
63 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm2, %ymm2
64 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
65 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm3[1],xmm4[1]
66 ; AVX1-ONLY-NEXT: vmovaps %xmm2, 96(%rax)
67 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 64(%rax)
68 ; AVX1-ONLY-NEXT: vmovapd %ymm0, 32(%rax)
69 ; AVX1-ONLY-NEXT: vmovaps %ymm5, (%rax)
70 ; AVX1-ONLY-NEXT: vzeroupper
71 ; AVX1-ONLY-NEXT: retq
73 ; AVX2-ONLY-LABEL: store_i64_stride7_vf2:
; AVX2-ONLY: # %bb.0:
75 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
76 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
77 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm0
78 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm1
79 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm2
80 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm3
81 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm4
82 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
83 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
84 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm5
85 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1],ymm0[2,3],ymm5[4,5,6,7]
86 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,2,1]
87 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm4, %ymm6
88 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5],ymm5[6,7]
89 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
90 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,1,3,3]
91 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
92 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm6[0,1,2,3,4,5],ymm2[6,7]
93 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
94 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
95 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm3[1],xmm4[1]
96 ; AVX2-ONLY-NEXT: vmovaps %xmm1, 96(%rax)
97 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
98 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 64(%rax)
99 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 32(%rax)
100 ; AVX2-ONLY-NEXT: vzeroupper
101 ; AVX2-ONLY-NEXT: retq
103 ; AVX512-LABEL: store_i64_stride7_vf2:
; AVX512: # %bb.0:
105 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
106 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
107 ; AVX512-NEXT: vmovdqa (%rdi), %xmm0
108 ; AVX512-NEXT: vmovdqa (%rdx), %xmm1
109 ; AVX512-NEXT: vmovdqa (%r8), %xmm2
110 ; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
111 ; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
112 ; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
113 ; AVX512-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm1
114 ; AVX512-NEXT: vinserti32x4 $2, (%r10), %zmm1, %zmm1
115 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,2,4,6,8,10,12,1]
116 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm2
117 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm3 = <3,5,7,9,11,13,u,u>
118 ; AVX512-NEXT: vpermi2q %zmm1, %zmm0, %zmm3
119 ; AVX512-NEXT: vextracti32x4 $2, %zmm3, 96(%rax)
120 ; AVX512-NEXT: vmovdqa64 %zmm2, (%rax)
121 ; AVX512-NEXT: vmovdqa %ymm3, 64(%rax)
122 ; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
124 %in.vec0 = load <2 x i64>, ptr %in.vecptr0, align 64
125 %in.vec1 = load <2 x i64>, ptr %in.vecptr1, align 64
126 %in.vec2 = load <2 x i64>, ptr %in.vecptr2, align 64
127 %in.vec3 = load <2 x i64>, ptr %in.vecptr3, align 64
128 %in.vec4 = load <2 x i64>, ptr %in.vecptr4, align 64
129 %in.vec5 = load <2 x i64>, ptr %in.vecptr5, align 64
130 %in.vec6 = load <2 x i64>, ptr %in.vecptr6, align 64
131 %1 = shufflevector <2 x i64> %in.vec0, <2 x i64> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
132 %2 = shufflevector <2 x i64> %in.vec2, <2 x i64> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
133 %3 = shufflevector <2 x i64> %in.vec4, <2 x i64> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
134 %4 = shufflevector <4 x i64> %1, <4 x i64> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
135 %5 = shufflevector <2 x i64> %in.vec6, <2 x i64> poison, <4 x i32> <i32 0, i32 1, i32 undef, i32 undef>
136 %6 = shufflevector <4 x i64> %3, <4 x i64> %5, <6 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5>
137 %7 = shufflevector <6 x i64> %6, <6 x i64> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 undef, i32 undef>
138 %8 = shufflevector <8 x i64> %4, <8 x i64> %7, <14 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13>
139 %interleaved.vec = shufflevector <14 x i64> %8, <14 x i64> poison, <14 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13>
140 store <14 x i64> %interleaved.vec, ptr %out.vec, align 64
ret void
}
144 define void @store_i64_stride7_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
145 ; SSE-LABEL: store_i64_stride7_vf4:
; SSE: # %bb.0:
147 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
148 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
149 ; SSE-NEXT: movapd (%rdi), %xmm8
150 ; SSE-NEXT: movaps 16(%rdi), %xmm7
151 ; SSE-NEXT: movapd (%rsi), %xmm0
152 ; SSE-NEXT: movaps 16(%rsi), %xmm13
153 ; SSE-NEXT: movapd (%rdx), %xmm4
154 ; SSE-NEXT: movaps 16(%rdx), %xmm2
155 ; SSE-NEXT: movapd (%rcx), %xmm3
156 ; SSE-NEXT: movaps 16(%rcx), %xmm1
157 ; SSE-NEXT: movapd (%r8), %xmm10
158 ; SSE-NEXT: movaps 16(%r8), %xmm6
159 ; SSE-NEXT: movapd (%r9), %xmm9
160 ; SSE-NEXT: movaps 16(%r9), %xmm5
161 ; SSE-NEXT: movapd (%r10), %xmm14
162 ; SSE-NEXT: movaps 16(%r10), %xmm12
163 ; SSE-NEXT: movaps %xmm13, %xmm11
164 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm2[1]
165 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
166 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
167 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
168 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm12[1]
169 ; SSE-NEXT: movapd %xmm8, %xmm15
170 ; SSE-NEXT: unpcklpd {{.*#+}} xmm15 = xmm15[0],xmm0[0]
171 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
172 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm3[0]
173 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm10[1]
174 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
175 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm13[0]
176 ; SSE-NEXT: unpcklpd {{.*#+}} xmm10 = xmm10[0],xmm9[0]
177 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm14[1]
178 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm14[0],xmm8[1]
179 ; SSE-NEXT: movapd %xmm10, 32(%rax)
180 ; SSE-NEXT: movapd %xmm8, 48(%rax)
181 ; SSE-NEXT: movapd %xmm9, 96(%rax)
182 ; SSE-NEXT: movaps %xmm7, 112(%rax)
183 ; SSE-NEXT: movaps %xmm12, 160(%rax)
184 ; SSE-NEXT: movaps %xmm11, 176(%rax)
185 ; SSE-NEXT: movapd %xmm15, (%rax)
186 ; SSE-NEXT: movapd %xmm4, 16(%rax)
187 ; SSE-NEXT: movapd %xmm0, 64(%rax)
188 ; SSE-NEXT: movapd %xmm3, 80(%rax)
189 ; SSE-NEXT: movaps %xmm2, 128(%rax)
190 ; SSE-NEXT: movaps %xmm6, 144(%rax)
191 ; SSE-NEXT: movaps %xmm1, 192(%rax)
192 ; SSE-NEXT: movaps %xmm5, 208(%rax)
; SSE-NEXT: retq
195 ; AVX1-ONLY-LABEL: store_i64_stride7_vf4:
196 ; AVX1-ONLY: # %bb.0:
197 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
198 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
199 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm1
200 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm2
201 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm3
202 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm4
203 ; AVX1-ONLY-NEXT: vmovaps 16(%r10), %xmm0
204 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm5
205 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm2[1],ymm5[1],ymm2[3],ymm5[3]
206 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
207 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm5[2,3,4,5,6,7]
208 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm5
209 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm6
210 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm6[0],xmm5[0]
211 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm8
212 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm9
213 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm7, %ymm7
214 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,3],ymm7[4,5],ymm9[6,7]
215 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
216 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm9
217 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm10
218 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
219 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm12
220 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
221 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm11[0,1,2,3,4,5],ymm6[6,7]
222 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],mem[0],ymm3[2],mem[2]
223 ; AVX1-ONLY-NEXT: vmovaps 16(%rcx), %xmm11
224 ; AVX1-ONLY-NEXT: vmovaps 16(%rdx), %xmm12
225 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm12 = xmm12[0],xmm11[0]
226 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm12[0,1,2,3],ymm3[4,5,6,7]
227 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
228 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm5[1],xmm4[1]
229 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
230 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],mem[1]
231 ; AVX1-ONLY-NEXT: vbroadcastsd 24(%r9), %ymm4
232 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
233 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
234 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm8[0],xmm10[0]
235 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm9[0],mem[0]
236 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
237 ; AVX1-ONLY-NEXT: vmovaps %xmm5, 16(%rax)
238 ; AVX1-ONLY-NEXT: vmovaps %xmm4, (%rax)
239 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
240 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 192(%rax)
241 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 64(%rax)
242 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 32(%rax)
243 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
244 ; AVX1-ONLY-NEXT: vzeroupper
245 ; AVX1-ONLY-NEXT: retq
247 ; AVX2-ONLY-LABEL: store_i64_stride7_vf4:
248 ; AVX2-ONLY: # %bb.0:
249 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
250 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
251 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm4
252 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm5
253 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm6
254 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm1
255 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm2
256 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm3
257 ; AVX2-ONLY-NEXT: vmovaps 16(%r10), %xmm0
258 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm4[1],ymm5[1],ymm4[3],ymm5[3]
259 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,2,3,3]
260 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],ymm6[6,7]
261 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3,4,5,6,7]
262 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm7
263 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm8
264 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm9
265 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm10
266 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm10[1],xmm9[1]
267 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm12
268 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm12[4,5,6,7]
269 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm11[0,1,2,3,4,5],ymm8[6,7]
270 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[2],ymm5[2]
271 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm5
272 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm5[1],xmm3[1]
273 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm11[0,1,2,3],ymm4[4,5,6,7]
274 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm10, %ymm10
275 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm11
276 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm11, %ymm9
277 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm9[0],ymm10[0],ymm9[2],ymm10[2]
278 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm2[0],mem[0],ymm2[2],mem[2]
279 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],ymm1[0],ymm6[2],ymm1[2]
280 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm6[2,3],ymm10[2,3]
281 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm7[0],xmm5[0]
282 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm5, %ymm5
283 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm3, %ymm3
284 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3],ymm3[4,5],ymm5[6,7]
285 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
286 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%r9), %ymm2
287 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
288 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
289 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 128(%rax)
290 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 192(%rax)
291 ; AVX2-ONLY-NEXT: vmovaps %ymm9, (%rax)
292 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 96(%rax)
293 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 64(%rax)
294 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 32(%rax)
295 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
296 ; AVX2-ONLY-NEXT: vzeroupper
297 ; AVX2-ONLY-NEXT: retq
299 ; AVX512F-LABEL: store_i64_stride7_vf4:
; AVX512F: # %bb.0:
301 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
302 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
303 ; AVX512F-NEXT: vmovdqa (%rdi), %ymm1
304 ; AVX512F-NEXT: vmovdqa (%rdx), %ymm2
305 ; AVX512F-NEXT: vmovdqa (%r8), %ymm3
306 ; AVX512F-NEXT: vmovdqa (%r10), %ymm0
307 ; AVX512F-NEXT: vinserti64x4 $1, (%rsi), %zmm1, %zmm4
308 ; AVX512F-NEXT: vinserti64x4 $1, (%rcx), %zmm2, %zmm2
309 ; AVX512F-NEXT: vinserti64x4 $1, (%r9), %zmm3, %zmm3
310 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = <15,3,7,u>
311 ; AVX512F-NEXT: vpermi2q %zmm2, %zmm3, %zmm1
312 ; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,4,8,0,0,4,8,0]
313 ; AVX512F-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
314 ; AVX512F-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
315 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,4,8,12,u,u,u,1>
316 ; AVX512F-NEXT: vpermi2q %zmm2, %zmm4, %zmm6
317 ; AVX512F-NEXT: movb $112, %cl
318 ; AVX512F-NEXT: kmovw %ecx, %k1
319 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
320 ; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,9,0,1,5,9,0,1]
321 ; AVX512F-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
322 ; AVX512F-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
323 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <5,9,13,u,u,u,2,6>
324 ; AVX512F-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
325 ; AVX512F-NEXT: movb $56, %cl
326 ; AVX512F-NEXT: kmovw %ecx, %k1
327 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
328 ; AVX512F-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [10,0,2,6,10,0,2,6]
329 ; AVX512F-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
330 ; AVX512F-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
331 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm3 = <2,6,u,u,u,11,15,3>
332 ; AVX512F-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
333 ; AVX512F-NEXT: movb $28, %cl
334 ; AVX512F-NEXT: kmovw %ecx, %k1
335 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm3 {%k1}
336 ; AVX512F-NEXT: vmovdqa64 %zmm3, 128(%rax)
337 ; AVX512F-NEXT: vmovdqa64 %zmm7, 64(%rax)
338 ; AVX512F-NEXT: vmovdqa64 %zmm6, (%rax)
339 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
340 ; AVX512F-NEXT: vmovdqa %ymm0, 192(%rax)
341 ; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
344 ; AVX512BW-LABEL: store_i64_stride7_vf4:
; AVX512BW: # %bb.0:
346 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
347 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
348 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm1
349 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm2
350 ; AVX512BW-NEXT: vmovdqa (%r8), %ymm3
351 ; AVX512BW-NEXT: vmovdqa (%r10), %ymm0
352 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm1, %zmm4
353 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm2, %zmm2
354 ; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm3, %zmm3
355 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = <15,3,7,u>
356 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm3, %zmm1
357 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,4,8,0,0,4,8,0]
358 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
359 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
360 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,4,8,12,u,u,u,1>
361 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm4, %zmm6
362 ; AVX512BW-NEXT: movb $112, %cl
363 ; AVX512BW-NEXT: kmovd %ecx, %k1
364 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6 {%k1}
365 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,9,0,1,5,9,0,1]
366 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
367 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
368 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <5,9,13,u,u,u,2,6>
369 ; AVX512BW-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
370 ; AVX512BW-NEXT: movb $56, %cl
371 ; AVX512BW-NEXT: kmovd %ecx, %k1
372 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
373 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [10,0,2,6,10,0,2,6]
374 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
375 ; AVX512BW-NEXT: vpermi2q %zmm0, %zmm3, %zmm5
376 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <2,6,u,u,u,11,15,3>
377 ; AVX512BW-NEXT: vpermi2q %zmm4, %zmm2, %zmm3
378 ; AVX512BW-NEXT: movb $28, %cl
379 ; AVX512BW-NEXT: kmovd %ecx, %k1
380 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm3 {%k1}
381 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 128(%rax)
382 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 64(%rax)
383 ; AVX512BW-NEXT: vmovdqa64 %zmm6, (%rax)
384 ; AVX512BW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
385 ; AVX512BW-NEXT: vmovdqa %ymm0, 192(%rax)
386 ; AVX512BW-NEXT: vzeroupper
387 ; AVX512BW-NEXT: retq
388 %in.vec0 = load <4 x i64>, ptr %in.vecptr0, align 64
389 %in.vec1 = load <4 x i64>, ptr %in.vecptr1, align 64
390 %in.vec2 = load <4 x i64>, ptr %in.vecptr2, align 64
391 %in.vec3 = load <4 x i64>, ptr %in.vecptr3, align 64
392 %in.vec4 = load <4 x i64>, ptr %in.vecptr4, align 64
393 %in.vec5 = load <4 x i64>, ptr %in.vecptr5, align 64
394 %in.vec6 = load <4 x i64>, ptr %in.vecptr6, align 64
395 %1 = shufflevector <4 x i64> %in.vec0, <4 x i64> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
396 %2 = shufflevector <4 x i64> %in.vec2, <4 x i64> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
397 %3 = shufflevector <4 x i64> %in.vec4, <4 x i64> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
398 %4 = shufflevector <8 x i64> %1, <8 x i64> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
399 %5 = shufflevector <4 x i64> %in.vec6, <4 x i64> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
400 %6 = shufflevector <8 x i64> %3, <8 x i64> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
401 %7 = shufflevector <12 x i64> %6, <12 x i64> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 undef, i32 undef, i32 undef, i32 undef>
402 %8 = shufflevector <16 x i64> %4, <16 x i64> %7, <28 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27>
403 %interleaved.vec = shufflevector <28 x i64> %8, <28 x i64> poison, <28 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27>
404 store <28 x i64> %interleaved.vec, ptr %out.vec, align 64
ret void
}
408 define void @store_i64_stride7_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
409 ; SSE-LABEL: store_i64_stride7_vf8:
; SSE: # %bb.0:
411 ; SSE-NEXT: subq $88, %rsp
412 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
413 ; SSE-NEXT: movapd (%rdi), %xmm2
414 ; SSE-NEXT: movapd 16(%rdi), %xmm5
415 ; SSE-NEXT: movapd 32(%rdi), %xmm10
416 ; SSE-NEXT: movapd (%rsi), %xmm3
417 ; SSE-NEXT: movapd 16(%rsi), %xmm6
418 ; SSE-NEXT: movapd (%rdx), %xmm4
419 ; SSE-NEXT: movapd 16(%rdx), %xmm8
420 ; SSE-NEXT: movapd (%rcx), %xmm7
421 ; SSE-NEXT: movapd 16(%rcx), %xmm11
422 ; SSE-NEXT: movapd (%r8), %xmm9
423 ; SSE-NEXT: movapd 16(%r8), %xmm14
424 ; SSE-NEXT: movapd (%r9), %xmm12
425 ; SSE-NEXT: movapd 16(%r9), %xmm13
426 ; SSE-NEXT: movapd (%rax), %xmm0
427 ; SSE-NEXT: movapd 16(%rax), %xmm1
428 ; SSE-NEXT: movapd %xmm2, %xmm15
429 ; SSE-NEXT: unpcklpd {{.*#+}} xmm15 = xmm15[0],xmm3[0]
430 ; SSE-NEXT: movapd %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
431 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
432 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
433 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
434 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
435 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm7[0]
436 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
437 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm9[1]
438 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
439 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm12[0]
440 ; SSE-NEXT: movapd %xmm9, (%rsp) # 16-byte Spill
441 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
442 ; SSE-NEXT: movapd %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
443 ; SSE-NEXT: movapd %xmm5, %xmm0
444 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm6[0]
445 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
446 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm1[0],xmm5[1]
447 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
448 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm8[1]
449 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
450 ; SSE-NEXT: unpcklpd {{.*#+}} xmm8 = xmm8[0],xmm11[0]
451 ; SSE-NEXT: movapd %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
452 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm14[1]
453 ; SSE-NEXT: movapd %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
454 ; SSE-NEXT: unpcklpd {{.*#+}} xmm14 = xmm14[0],xmm13[0]
455 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm1[1]
456 ; SSE-NEXT: movapd 32(%rsi), %xmm12
457 ; SSE-NEXT: movapd %xmm10, %xmm15
458 ; SSE-NEXT: unpcklpd {{.*#+}} xmm15 = xmm15[0],xmm12[0]
459 ; SSE-NEXT: movapd 32(%rax), %xmm3
460 ; SSE-NEXT: movsd {{.*#+}} xmm10 = xmm3[0],xmm10[1]
461 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
462 ; SSE-NEXT: movapd 32(%rdx), %xmm11
463 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm11[1]
464 ; SSE-NEXT: movapd 32(%rcx), %xmm8
465 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm8[0]
466 ; SSE-NEXT: movapd 32(%r8), %xmm9
467 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
468 ; SSE-NEXT: movapd 32(%r9), %xmm6
469 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm6[0]
470 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm3[1]
471 ; SSE-NEXT: movapd 48(%rdi), %xmm5
472 ; SSE-NEXT: movapd 48(%rsi), %xmm4
473 ; SSE-NEXT: movapd %xmm5, %xmm7
474 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm4[0]
475 ; SSE-NEXT: movapd 48(%rax), %xmm10
476 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm10[0],xmm5[1]
477 ; SSE-NEXT: movapd 48(%rdx), %xmm3
478 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
479 ; SSE-NEXT: movapd 48(%rcx), %xmm2
480 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm2[0]
481 ; SSE-NEXT: movapd 48(%r8), %xmm1
482 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
483 ; SSE-NEXT: movapd 48(%r9), %xmm0
484 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
485 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
486 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
487 ; SSE-NEXT: movapd %xmm0, 432(%rax)
488 ; SSE-NEXT: movapd %xmm2, 416(%rax)
489 ; SSE-NEXT: movapd %xmm4, 400(%rax)
490 ; SSE-NEXT: movapd %xmm5, 384(%rax)
491 ; SSE-NEXT: movapd %xmm1, 368(%rax)
492 ; SSE-NEXT: movapd %xmm3, 352(%rax)
493 ; SSE-NEXT: movapd %xmm7, 336(%rax)
494 ; SSE-NEXT: movapd %xmm6, 320(%rax)
495 ; SSE-NEXT: movapd %xmm8, 304(%rax)
496 ; SSE-NEXT: movapd %xmm12, 288(%rax)
497 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
498 ; SSE-NEXT: movaps %xmm0, 272(%rax)
499 ; SSE-NEXT: movapd %xmm9, 256(%rax)
500 ; SSE-NEXT: movapd %xmm11, 240(%rax)
501 ; SSE-NEXT: movapd %xmm15, 224(%rax)
502 ; SSE-NEXT: movapd %xmm13, 208(%rax)
503 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
504 ; SSE-NEXT: movaps %xmm0, 192(%rax)
505 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
506 ; SSE-NEXT: movaps %xmm0, 176(%rax)
507 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
508 ; SSE-NEXT: movaps %xmm0, 160(%rax)
509 ; SSE-NEXT: movapd %xmm14, 144(%rax)
510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
511 ; SSE-NEXT: movaps %xmm0, 128(%rax)
512 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
513 ; SSE-NEXT: movaps %xmm0, 112(%rax)
514 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
515 ; SSE-NEXT: movaps %xmm0, 96(%rax)
516 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
517 ; SSE-NEXT: movaps %xmm0, 80(%rax)
518 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
519 ; SSE-NEXT: movaps %xmm0, 64(%rax)
520 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
521 ; SSE-NEXT: movaps %xmm0, 48(%rax)
522 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
523 ; SSE-NEXT: movaps %xmm0, 32(%rax)
524 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
525 ; SSE-NEXT: movaps %xmm0, 16(%rax)
526 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
527 ; SSE-NEXT: movaps %xmm0, (%rax)
528 ; SSE-NEXT: addq $88, %rsp
; SSE-NEXT: retq
531 ; AVX1-ONLY-LABEL: store_i64_stride7_vf8:
532 ; AVX1-ONLY: # %bb.0:
533 ; AVX1-ONLY-NEXT: pushq %rax
534 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
535 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm2
536 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
537 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm7
538 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm3
539 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm5
540 ; AVX1-ONLY-NEXT: vmovaps 16(%rax), %xmm0
541 ; AVX1-ONLY-NEXT: vmovapd 32(%rax), %xmm11
542 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm1
543 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
544 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm7[6,7]
545 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
546 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
547 ; AVX1-ONLY-NEXT: vmovapd 32(%r8), %xmm4
548 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm1
549 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm6
550 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm8
551 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm9 = xmm8[1],xmm6[1]
552 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm10
553 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
554 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],ymm1[6,7]
555 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
556 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm10
557 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm12
558 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm9
559 ; AVX1-ONLY-NEXT: vmovapd 32(%r9), %xmm15
560 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm4 = xmm4[0],xmm15[0]
561 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm4, %ymm4
562 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm12[6,7]
563 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
564 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm8 = xmm10[0],xmm8[0]
565 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rcx), %ymm8, %ymm10
566 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm6
567 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0],ymm10[1],ymm6[2],ymm10[2]
568 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm8
569 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
570 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm8
571 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm8[2,3,2,3]
572 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1],ymm7[2,3,4,5,6,7]
573 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm12
574 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm10
575 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],ymm10[6,7]
576 ; AVX1-ONLY-NEXT: vmovaps 16(%rcx), %xmm13
577 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm13[2,3,2,3]
578 ; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm14
579 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm14 = ymm3[1],ymm14[1],ymm3[3],ymm14[3]
580 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],mem[6,7]
581 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm14[2,3,4,5,6,7]
582 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm12 = xmm12[0],xmm9[0]
583 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm14
584 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm12, %ymm0
585 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm12, %ymm12
586 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm0[2,3],ymm12[4,5],ymm0[6,7]
587 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm15[1],xmm11[1]
588 ; AVX1-ONLY-NEXT: vmovapd 32(%rdi), %ymm11
589 ; AVX1-ONLY-NEXT: vmovapd 32(%rsi), %ymm15
590 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm15[0],ymm11[2],ymm15[2]
591 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm11 = ymm0[0,1],ymm11[2,3]
592 ; AVX1-ONLY-NEXT: vmovapd 48(%rdi), %xmm0
593 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],mem[2,3]
594 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm15 = ymm15[0,0,3,2]
595 ; AVX1-ONLY-NEXT: vmovapd 32(%rax), %ymm2
596 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm15 = ymm2[2,3],ymm15[2,3]
597 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm15 = ymm15[0],ymm0[1],ymm15[2],ymm0[3]
598 ; AVX1-ONLY-NEXT: vmovapd 32(%r8), %ymm0
599 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],mem[0],ymm0[2],mem[2]
600 ; AVX1-ONLY-NEXT: vmovapd 48(%rcx), %xmm0
601 ; AVX1-ONLY-NEXT: vmovapd 48(%rdx), %xmm4
602 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm4 = xmm4[0],xmm0[0]
603 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm4[0,1],ymm1[2,3]
604 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm4
605 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
606 ; AVX1-ONLY-NEXT: # ymm4 = ymm4[0],mem[0],ymm4[2],mem[2]
607 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm9[1],xmm5[1]
608 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3],ymm4[4,5,6,7]
609 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm5
610 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
611 ; AVX1-ONLY-NEXT: vmovaps 16(%rdx), %xmm5
612 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm5[0],xmm13[0]
613 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3],ymm3[4,5,6,7]
614 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
615 ; AVX1-ONLY-NEXT: vbroadcastsd 56(%r9), %ymm5
616 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm5[2,3]
617 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3]
618 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
619 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm14[0],xmm8[0]
620 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm5
621 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm5[0],mem[0]
622 ; AVX1-ONLY-NEXT: vmovaps %xmm5, 16(%rax)
623 ; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rax)
624 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
625 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 96(%rax)
626 ; AVX1-ONLY-NEXT: vmovapd %ymm1, 352(%rax)
627 ; AVX1-ONLY-NEXT: vmovapd %ymm15, 384(%rax)
628 ; AVX1-ONLY-NEXT: vmovapd %ymm11, 320(%rax)
629 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 32(%rax)
630 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 192(%rax)
631 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 64(%rax)
632 ; AVX1-ONLY-NEXT: vmovapd %ymm6, 224(%rax)
633 ; AVX1-ONLY-NEXT: vmovapd %ymm0, 416(%rax)
634 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
635 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
636 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
637 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
638 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
639 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
640 ; AVX1-ONLY-NEXT: popq %rax
641 ; AVX1-ONLY-NEXT: vzeroupper
642 ; AVX1-ONLY-NEXT: retq
644 ; AVX2-ONLY-LABEL: store_i64_stride7_vf8:
645 ; AVX2-ONLY: # %bb.0:
646 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
647 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm8
648 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm4
649 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm7
650 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm10
651 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm5
652 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm9
653 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm3
654 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm2
655 ; AVX2-ONLY-NEXT: vmovaps 16(%rax), %xmm0
656 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm13
657 ; AVX2-ONLY-NEXT: vmovaps 48(%rax), %xmm12
658 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm4[1],ymm7[1],ymm4[3],ymm7[3]
659 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
660 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm5[6,7]
661 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
662 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
663 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm15
664 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm1
665 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm6
666 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm11 = mem[0,0]
667 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm11 = xmm11[1],xmm6[1]
668 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm14
669 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm14[4,5,6,7]
670 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3,4,5],ymm1[6,7]
671 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
672 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],mem[0],ymm3[2],mem[2]
673 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm9[0],mem[0],ymm9[2],mem[2]
674 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm11[2,3],ymm3[2,3]
675 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
676 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[2],ymm7[2]
677 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
678 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm7 = xmm7[1],xmm2[1]
679 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm7[0,1,2,3],ymm4[4,5,6,7]
680 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm11
681 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm14
682 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm7
683 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm15 = xmm15[0],mem[0]
684 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm15, %ymm15
685 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm15[0,1,2,3,4,5],ymm7[6,7]
686 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm15 = ymm8[1],ymm10[1],ymm8[3],ymm10[3]
687 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,2,3,3]
688 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm15[0,1,2,3,4,5],ymm9[6,7]
689 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm12[0,1],ymm9[2,3,4,5,6,7]
690 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm12
691 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm12, %ymm12
692 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm11, %ymm15
693 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm12 = ymm15[0],ymm12[0],ymm15[2],ymm12[2]
694 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm15
695 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm8[0],ymm10[0],ymm8[2],ymm10[2]
696 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
697 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm10 = xmm10[1],xmm13[1]
698 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
699 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
700 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm13
701 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm5[0,1,2,3],ymm13[4,5,6,7]
702 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm13[2,3,4,5,6,7]
703 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm13
704 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm13[6,7]
705 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm13
706 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],mem[0],ymm5[2],mem[2]
707 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm0
708 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm13[0],ymm0[0],ymm13[2],ymm0[2]
709 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm5[2,3],ymm1[2,3]
710 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%rcx), %ymm5
711 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],mem[2,3]
712 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%r9), %ymm3
713 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3],ymm3[4,5,6,7]
714 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm14[0],mem[0]
715 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
716 ; AVX2-ONLY-NEXT: vbroadcastsd 32(%rcx), %ymm6
717 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm6[6,7]
718 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm15[0],mem[0]
719 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm6, %ymm6
720 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm2, %ymm2
721 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm6[0,1,2,3],ymm2[4,5],ymm6[6,7]
722 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm13[1],ymm0[1],ymm13[3],ymm0[3]
723 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
724 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
725 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%rcx), %ymm6
726 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1],ymm0[2,3,4,5,6,7]
727 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rcx
728 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
729 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 128(%rcx)
730 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 64(%rcx)
731 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 320(%rcx)
732 ; AVX2-ONLY-NEXT: vmovaps %ymm12, (%rcx)
733 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rcx)
734 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 384(%rcx)
735 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 256(%rcx)
736 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 32(%rcx)
737 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 96(%rcx)
738 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
739 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rcx)
740 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 224(%rcx)
741 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 416(%rcx)
742 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
743 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rcx)
744 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
745 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rcx)
746 ; AVX2-ONLY-NEXT: vzeroupper
747 ; AVX2-ONLY-NEXT: retq
749 ; AVX512F-ONLY-SLOW-LABEL: store_i64_stride7_vf8:
750 ; AVX512F-ONLY-SLOW: # %bb.0:
751 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
752 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
753 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm0
754 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm1
755 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm3
756 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
757 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm6
758 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm7
759 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r10), %zmm2
760 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
761 ; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
762 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [0,8,0,8,0,8,0,8]
763 ; AVX512F-ONLY-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
764 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm8
765 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,0,14,6,5,0,14,6]
766 ; AVX512F-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
767 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
768 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [9,1,9,1,9,1,9,1]
769 ; AVX512F-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
770 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
771 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [4,12,0,5,4,12,0,5]
772 ; AVX512F-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
773 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm10
774 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm5, %zmm6
775 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm5
776 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [15,7,15,7]
777 ; AVX512F-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
778 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
779 ; AVX512F-ONLY-SLOW-NEXT: movb $24, %sil
780 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k1
781 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
782 ; AVX512F-ONLY-SLOW-NEXT: movb $96, %sil
783 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k1
784 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm7 {%k1}
785 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
786 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm7, %zmm5
787 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,0,1,0,8,0,1]
788 ; AVX512F-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
789 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm6
790 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm7
791 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm7 = xmm7[0],mem[0]
792 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
793 ; AVX512F-ONLY-SLOW-NEXT: movb $12, %sil
794 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
795 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k2}
796 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, (%r10), %zmm8, %zmm7
797 ; AVX512F-ONLY-SLOW-NEXT: movb $112, %sil
798 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
799 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k2}
800 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,13,6,7,0,13,6,7]
801 ; AVX512F-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
802 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm9, %zmm7
803 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
804 ; AVX512F-ONLY-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
805 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm8
806 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [6,14,6,14]
807 ; AVX512F-ONLY-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
808 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
809 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm9[0,1,2,3],zmm8[4,5,6,7]
810 ; AVX512F-ONLY-SLOW-NEXT: movb $-61, %sil
811 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
812 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
813 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [11,3,11,3,11,3,11,3]
814 ; AVX512F-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
815 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
816 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [2,10,0,3,2,10,0,3]
817 ; AVX512F-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
818 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm9
819 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9 {%k1}
820 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm7
821 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm12
822 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm7[0],ymm12[2],ymm7[2]
823 ; AVX512F-ONLY-SLOW-NEXT: movb $28, %sil
824 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
825 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm13[2,3,2,3],zmm2[2,3,2,3]
826 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [1,0,10,2,1,0,10,2]
827 ; AVX512F-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
828 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm1, %zmm13
829 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm14
830 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = mem[0,1,2,3],ymm14[4,5,6,7]
831 ; AVX512F-ONLY-SLOW-NEXT: movb $6, %cl
832 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k2
833 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm14, %zmm0, %zmm13 {%k2}
834 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [4,9,0,3,4,9,0,3]
835 ; AVX512F-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
836 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm11, %zmm14
837 ; AVX512F-ONLY-SLOW-NEXT: movb $56, %cl
838 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k2
839 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm13 {%k2}
840 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
841 ; AVX512F-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
842 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [3,0,12,4,3,0,12,4]
843 ; AVX512F-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
844 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm3, %zmm4, %zmm14
845 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm11, %zmm3
846 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm11
847 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11 {%k1}
848 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,1,12,7,0,1,12,7]
849 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
850 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm10, %zmm3
851 ; AVX512F-ONLY-SLOW-NEXT: movb $120, %cl
852 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
853 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm3 {%k1}
854 ; AVX512F-ONLY-SLOW-NEXT: movb $48, %cl
855 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
856 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm14 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
857 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm12[1],ymm7[1],ymm12[3],ymm7[3]
858 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
859 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
860 ; AVX512F-ONLY-SLOW-NEXT: movb $14, %cl
861 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
862 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm14 {%k1}
863 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 256(%rax)
864 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 64(%rax)
865 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 128(%rax)
866 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 320(%rax)
867 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, (%rax)
868 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 384(%rax)
869 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 192(%rax)
870 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
871 ; AVX512F-ONLY-SLOW-NEXT: retq
873 ; AVX512F-ONLY-FAST-LABEL: store_i64_stride7_vf8:
874 ; AVX512F-ONLY-FAST: # %bb.0:
875 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
876 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
877 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm0
878 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm1
879 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
880 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
881 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm6
882 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm7
883 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r10), %zmm3
884 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
885 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
886 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [0,8,0,8,0,8,0,8]
887 ; AVX512F-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
888 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm8
889 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,0,14,6,5,0,14,6]
890 ; AVX512F-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
891 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
892 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [9,1,9,1,9,1,9,1]
893 ; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
894 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm10
895 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,0,5,4,12,0,5]
896 ; AVX512F-ONLY-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
897 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
898 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm5, %zmm6
899 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm5
900 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [15,7,15,7]
901 ; AVX512F-ONLY-FAST-NEXT: # ymm7 = mem[0,1,0,1]
902 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
903 ; AVX512F-ONLY-FAST-NEXT: movb $24, %sil
904 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k1
905 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
906 ; AVX512F-ONLY-FAST-NEXT: movb $96, %sil
907 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k1
908 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm7 {%k1}
909 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
910 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm7, %zmm5
911 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,0,1,0,8,0,1]
912 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
913 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm6
914 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm7
915 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm7 = xmm7[0],mem[0]
916 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
917 ; AVX512F-ONLY-FAST-NEXT: movb $12, %sil
918 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
919 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k2}
920 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, (%r10), %zmm8, %zmm7
921 ; AVX512F-ONLY-FAST-NEXT: movb $112, %sil
922 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
923 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm6 {%k2}
924 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,13,6,7,0,13,6,7]
925 ; AVX512F-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
926 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm9, %zmm7
927 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
928 ; AVX512F-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
929 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm8
930 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [6,14,6,14]
931 ; AVX512F-ONLY-FAST-NEXT: # ymm9 = mem[0,1,0,1]
932 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
933 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm9[0,1,2,3],zmm8[4,5,6,7]
934 ; AVX512F-ONLY-FAST-NEXT: movb $-61, %sil
935 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
936 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
937 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [3,0,12,4,3,0,12,4]
938 ; AVX512F-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
939 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
940 ; AVX512F-ONLY-FAST-NEXT: movb $48, %sil
941 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
942 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k2} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
943 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm9
944 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm12
945 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <1,3,7,u>
946 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %ymm9, %ymm12, %ymm13
947 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
948 ; AVX512F-ONLY-FAST-NEXT: movb $14, %sil
949 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
950 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm0, %zmm7 {%k2}
951 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [11,3,11,3,11,3,11,3]
952 ; AVX512F-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
953 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
954 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [2,10,0,3,2,10,0,3]
955 ; AVX512F-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
956 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm14
957 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm14 {%k1}
958 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm9 = ymm12[0],ymm9[0],ymm12[2],ymm9[2]
959 ; AVX512F-ONLY-FAST-NEXT: movb $28, %sil
960 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
961 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm9[2,3,2,3],zmm3[2,3,2,3]
962 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [1,0,10,2,1,0,10,2]
963 ; AVX512F-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
964 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
965 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm12
966 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
967 ; AVX512F-ONLY-FAST-NEXT: movb $6, %cl
968 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k2
969 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm9 {%k2}
970 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,9,0,3,4,9,0,3]
971 ; AVX512F-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
972 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm10, %zmm12
973 ; AVX512F-ONLY-FAST-NEXT: movb $56, %cl
974 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k2
975 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm9 {%k2}
976 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [13,5,13,5,13,5,13,5]
977 ; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
978 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm2
979 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm10, %zmm0
980 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
981 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
982 ; AVX512F-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
983 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm11, %zmm1
984 ; AVX512F-ONLY-FAST-NEXT: movb $120, %cl
985 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k1
986 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
987 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 256(%rax)
988 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 64(%rax)
989 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 128(%rax)
990 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 192(%rax)
991 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 320(%rax)
992 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, (%rax)
993 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 384(%rax)
994 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
995 ; AVX512F-ONLY-FAST-NEXT: retq
997 ; AVX512DQ-SLOW-LABEL: store_i64_stride7_vf8:
998 ; AVX512DQ-SLOW: # %bb.0:
999 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1000 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1001 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm0
1002 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm1
1003 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm4
1004 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm5
1005 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm9
1006 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
1007 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r10), %zmm3
1008 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [5,0,14,6,5,0,14,6]
1009 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
1010 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm9, %zmm10, %zmm2
1011 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,6,7,0,13,6,7]
1012 ; AVX512DQ-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1013 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
1014 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
1015 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1016 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm2
1017 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
1018 ; AVX512DQ-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
1019 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1020 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm7[0,1,2,3],zmm2[4,5,6,7]
1021 ; AVX512DQ-SLOW-NEXT: movb $-61, %sil
1022 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k1
1023 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm2 {%k1}
1024 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [11,3,11,3,11,3,11,3]
1025 ; AVX512DQ-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1026 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1027 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [2,10,0,3,2,10,0,3]
1028 ; AVX512DQ-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1029 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm6
1030 ; AVX512DQ-SLOW-NEXT: movb $96, %sil
1031 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k1
1032 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
1033 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm7
1034 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm8
1035 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm11 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
1036 ; AVX512DQ-SLOW-NEXT: movb $28, %sil
1037 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
1038 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 {%k2} = zmm11[2,3,2,3],zmm3[2,3,2,3]
1039 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [15,7,15,7,15,7,15,7]
1040 ; AVX512DQ-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1041 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [9,1,9,1,9,1,9,1]
1042 ; AVX512DQ-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1043 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm12
1044 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [0,8,0,8,0,8,0,8]
1045 ; AVX512DQ-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1046 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm13
1047 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [4,12,0,5,4,12,0,5]
1048 ; AVX512DQ-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
1049 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm14
1050 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm11, %zmm9
1051 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm11
1052 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [15,7,15,7]
1053 ; AVX512DQ-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
1054 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm10
1055 ; AVX512DQ-SLOW-NEXT: movb $24, %sil
1056 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
1057 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k2}
1058 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
1059 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [14,1,2,3,4,5,6,15]
1060 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm10, %zmm9
1061 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,0,10,2,1,0,10,2]
1062 ; AVX512DQ-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1063 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm0, %zmm1, %zmm10
1064 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm11
1065 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = mem[0,1,2,3],ymm11[4,5,6,7]
1066 ; AVX512DQ-SLOW-NEXT: movb $6, %sil
1067 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
1068 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm10 {%k2}
1069 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,9,0,3,4,9,0,3]
1070 ; AVX512DQ-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1071 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm12, %zmm11
1072 ; AVX512DQ-SLOW-NEXT: movb $56, %sil
1073 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
1074 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k2}
1075 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
1076 ; AVX512DQ-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1077 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm11
1078 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm12
1079 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm12 = xmm12[0],mem[0]
1080 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
1081 ; AVX512DQ-SLOW-NEXT: movb $12, %cl
1082 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k2
1083 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm11 {%k2}
1084 ; AVX512DQ-SLOW-NEXT: movb $112, %cl
1085 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k2
1086 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, (%r10), %zmm13, %zmm11 {%k2}
1087 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [13,5,13,5,13,5,13,5]
1088 ; AVX512DQ-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1089 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [3,0,12,4,3,0,12,4]
1090 ; AVX512DQ-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
1091 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm13
1092 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm12, %zmm4
1093 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm12
1094 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k1}
1095 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,1,12,7,0,1,12,7]
1096 ; AVX512DQ-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
1097 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm14, %zmm4
1098 ; AVX512DQ-SLOW-NEXT: movb $120, %cl
1099 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
1100 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4 {%k1}
1101 ; AVX512DQ-SLOW-NEXT: movb $48, %cl
1102 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
1103 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm13 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1104 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
1105 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
1106 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
1107 ; AVX512DQ-SLOW-NEXT: movb $14, %cl
1108 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
1109 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k1}
1110 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 256(%rax)
1111 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, (%rax)
1112 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 64(%rax)
1113 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 384(%rax)
1114 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 128(%rax)
1115 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
1116 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
1117 ; AVX512DQ-SLOW-NEXT: vzeroupper
1118 ; AVX512DQ-SLOW-NEXT: retq
1120 ; AVX512DQ-FAST-LABEL: store_i64_stride7_vf8:
1121 ; AVX512DQ-FAST: # %bb.0:
1122 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1123 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1124 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm0
1125 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm1
1126 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
1127 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
1128 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm8
1129 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm9
1130 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r10), %zmm3
1131 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,0,14,6,5,0,14,6]
1132 ; AVX512DQ-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1133 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm9, %zmm5
1134 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,6,7,0,13,6,7]
1135 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1136 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm5, %zmm6
1137 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [6,14,6,14,6,14,6,14]
1138 ; AVX512DQ-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1139 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm5
1140 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
1141 ; AVX512DQ-FAST-NEXT: # ymm7 = mem[0,1,0,1]
1142 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1143 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm7[0,1,2,3],zmm5[4,5,6,7]
1144 ; AVX512DQ-FAST-NEXT: movb $-61, %sil
1145 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k1
1146 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
1147 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [3,0,12,4,3,0,12,4]
1148 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1149 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm4, %zmm6
1150 ; AVX512DQ-FAST-NEXT: movb $48, %sil
1151 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k1
1152 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1153 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm10
1154 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm11
1155 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <1,3,7,u>
1156 ; AVX512DQ-FAST-NEXT: vpermi2q %ymm10, %ymm11, %ymm7
1157 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],mem[6,7]
1158 ; AVX512DQ-FAST-NEXT: movb $14, %sil
1159 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k1
1160 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k1}
1161 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [11,3,11,3,11,3,11,3]
1162 ; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1163 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm12
1164 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [2,10,0,3,2,10,0,3]
1165 ; AVX512DQ-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1166 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm7
1167 ; AVX512DQ-FAST-NEXT: movb $96, %sil
1168 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k1
1169 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm7 {%k1}
1170 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
1171 ; AVX512DQ-FAST-NEXT: movb $28, %sil
1172 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
1173 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 {%k2} = zmm10[2,3,2,3],zmm3[2,3,2,3]
1174 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [15,7,15,7,15,7,15,7]
1175 ; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1176 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [9,1,9,1,9,1,9,1]
1177 ; AVX512DQ-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1178 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm11
1179 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [0,8,0,8,0,8,0,8]
1180 ; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1181 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm12
1182 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [4,12,0,5,4,12,0,5]
1183 ; AVX512DQ-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
1184 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm13
1185 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm8
1186 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm10
1187 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [15,7,15,7]
1188 ; AVX512DQ-FAST-NEXT: # ymm9 = mem[0,1,0,1]
1189 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
1190 ; AVX512DQ-FAST-NEXT: movb $24, %sil
1191 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
1192 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k2}
1193 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
1194 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [14,1,2,3,4,5,6,15]
1195 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm9, %zmm8
1196 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [1,0,10,2,1,0,10,2]
1197 ; AVX512DQ-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1198 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1199 ; AVX512DQ-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm10
1200 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm10 = mem[0,1,2,3],ymm10[4,5,6,7]
1201 ; AVX512DQ-FAST-NEXT: movb $6, %sil
1202 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
1203 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm9 {%k2}
1204 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [4,9,0,3,4,9,0,3]
1205 ; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1206 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm11, %zmm10
1207 ; AVX512DQ-FAST-NEXT: movb $56, %sil
1208 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
1209 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k2}
1210 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,8,0,1,0,8,0,1]
1211 ; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1212 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm10
1213 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm11
1214 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm11[0],mem[0]
1215 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
1216 ; AVX512DQ-FAST-NEXT: movb $12, %cl
1217 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k2
1218 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm10 {%k2}
1219 ; AVX512DQ-FAST-NEXT: movb $112, %cl
1220 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k2
1221 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, (%r10), %zmm12, %zmm10 {%k2}
1222 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
1223 ; AVX512DQ-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1224 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm11, %zmm2
1225 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm0
1226 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
1227 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
1228 ; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
1229 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm13, %zmm1
1230 ; AVX512DQ-FAST-NEXT: movb $120, %cl
1231 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k1
1232 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
1233 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 256(%rax)
1234 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, (%rax)
1235 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, 64(%rax)
1236 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, 384(%rax)
1237 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 128(%rax)
1238 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 192(%rax)
1239 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
1240 ; AVX512DQ-FAST-NEXT: vzeroupper
1241 ; AVX512DQ-FAST-NEXT: retq
1243 ; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride7_vf8:
1244 ; AVX512BW-ONLY-SLOW: # %bb.0:
1245 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1246 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1247 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm0
1248 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm1
1249 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm3
1250 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm4
1251 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm6
1252 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm7
1253 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r10), %zmm2
1254 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
1255 ; AVX512BW-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1256 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [0,8,0,8,0,8,0,8]
1257 ; AVX512BW-ONLY-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1258 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm8
1259 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,0,14,6,5,0,14,6]
1260 ; AVX512BW-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1261 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1262 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [9,1,9,1,9,1,9,1]
1263 ; AVX512BW-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1264 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1265 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [4,12,0,5,4,12,0,5]
1266 ; AVX512BW-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1267 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm10
1268 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm5, %zmm6
1269 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm5
1270 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [15,7,15,7]
1271 ; AVX512BW-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
1272 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1273 ; AVX512BW-ONLY-SLOW-NEXT: movb $24, %sil
1274 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k1
1275 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
1276 ; AVX512BW-ONLY-SLOW-NEXT: movb $96, %sil
1277 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k1
1278 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm7 {%k1}
1279 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
1280 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm7, %zmm5
1281 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,0,1,0,8,0,1]
1282 ; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1283 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm6
1284 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm7
1285 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm7 = xmm7[0],mem[0]
1286 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
1287 ; AVX512BW-ONLY-SLOW-NEXT: movb $12, %sil
1288 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
1289 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k2}
1290 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, (%r10), %zmm8, %zmm7
1291 ; AVX512BW-ONLY-SLOW-NEXT: movb $112, %sil
1292 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
1293 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k2}
1294 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,13,6,7,0,13,6,7]
1295 ; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1296 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm9, %zmm7
1297 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
1298 ; AVX512BW-ONLY-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1299 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm8
1300 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [6,14,6,14]
1301 ; AVX512BW-ONLY-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
1302 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
1303 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm9[0,1,2,3],zmm8[4,5,6,7]
1304 ; AVX512BW-ONLY-SLOW-NEXT: movb $-61, %sil
1305 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
1306 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
1307 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [11,3,11,3,11,3,11,3]
1308 ; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1309 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1310 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [2,10,0,3,2,10,0,3]
1311 ; AVX512BW-ONLY-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1312 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm9
1313 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9 {%k1}
1314 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm7
1315 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm12
1316 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm7[0],ymm12[2],ymm7[2]
1317 ; AVX512BW-ONLY-SLOW-NEXT: movb $28, %sil
1318 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
1319 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm13[2,3,2,3],zmm2[2,3,2,3]
1320 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [1,0,10,2,1,0,10,2]
1321 ; AVX512BW-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
1322 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm1, %zmm13
1323 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm14
1324 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = mem[0,1,2,3],ymm14[4,5,6,7]
1325 ; AVX512BW-ONLY-SLOW-NEXT: movb $6, %cl
1326 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
1327 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm14, %zmm0, %zmm13 {%k2}
1328 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [4,9,0,3,4,9,0,3]
1329 ; AVX512BW-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
1330 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm11, %zmm14
1331 ; AVX512BW-ONLY-SLOW-NEXT: movb $56, %cl
1332 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
1333 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm13 {%k2}
1334 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
1335 ; AVX512BW-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1336 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [3,0,12,4,3,0,12,4]
1337 ; AVX512BW-ONLY-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
1338 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm3, %zmm4, %zmm14
1339 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm11, %zmm3
1340 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm11
1341 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11 {%k1}
1342 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,1,12,7,0,1,12,7]
1343 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
1344 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm10, %zmm3
1345 ; AVX512BW-ONLY-SLOW-NEXT: movb $120, %cl
1346 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
1347 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm3 {%k1}
1348 ; AVX512BW-ONLY-SLOW-NEXT: movb $48, %cl
1349 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
1350 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm14 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1351 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm12[1],ymm7[1],ymm12[3],ymm7[3]
1352 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
1353 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
1354 ; AVX512BW-ONLY-SLOW-NEXT: movb $14, %cl
1355 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
1356 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm14 {%k1}
1357 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 256(%rax)
1358 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 64(%rax)
1359 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 128(%rax)
1360 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 320(%rax)
1361 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, (%rax)
1362 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 384(%rax)
1363 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 192(%rax)
1364 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
1365 ; AVX512BW-ONLY-SLOW-NEXT: retq
1367 ; AVX512BW-ONLY-FAST-LABEL: store_i64_stride7_vf8:
1368 ; AVX512BW-ONLY-FAST: # %bb.0:
1369 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1370 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1371 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm0
1372 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm1
1373 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
1374 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
1375 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm6
1376 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm7
1377 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r10), %zmm3
1378 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
1379 ; AVX512BW-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1380 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [0,8,0,8,0,8,0,8]
1381 ; AVX512BW-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1382 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm8
1383 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [5,0,14,6,5,0,14,6]
1384 ; AVX512BW-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1385 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm6, %zmm7, %zmm9
1386 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [9,1,9,1,9,1,9,1]
1387 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1388 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm10
1389 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,0,5,4,12,0,5]
1390 ; AVX512BW-ONLY-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1391 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm7, %zmm6, %zmm11
1392 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm5, %zmm6
1393 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm5
1394 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [15,7,15,7]
1395 ; AVX512BW-ONLY-FAST-NEXT: # ymm7 = mem[0,1,0,1]
1396 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1397 ; AVX512BW-ONLY-FAST-NEXT: movb $24, %sil
1398 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k1
1399 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
1400 ; AVX512BW-ONLY-FAST-NEXT: movb $96, %sil
1401 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k1
1402 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm7 {%k1}
1403 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
1404 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm7, %zmm5
1405 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,8,0,1,0,8,0,1]
1406 ; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1407 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm6
1408 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm7
1409 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm7 = xmm7[0],mem[0]
1410 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
1411 ; AVX512BW-ONLY-FAST-NEXT: movb $12, %sil
1412 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1413 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k2}
1414 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, (%r10), %zmm8, %zmm7
1415 ; AVX512BW-ONLY-FAST-NEXT: movb $112, %sil
1416 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1417 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm6 {%k2}
1418 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,13,6,7,0,13,6,7]
1419 ; AVX512BW-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1420 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm9, %zmm7
1421 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [6,14,6,14,6,14,6,14]
1422 ; AVX512BW-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1423 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm8
1424 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [6,14,6,14]
1425 ; AVX512BW-ONLY-FAST-NEXT: # ymm9 = mem[0,1,0,1]
1426 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
1427 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm9[0,1,2,3],zmm8[4,5,6,7]
1428 ; AVX512BW-ONLY-FAST-NEXT: movb $-61, %sil
1429 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1430 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm8 {%k2}
1431 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [3,0,12,4,3,0,12,4]
1432 ; AVX512BW-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1433 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
1434 ; AVX512BW-ONLY-FAST-NEXT: movb $48, %sil
1435 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1436 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k2} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1437 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm9
1438 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm12
1439 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <1,3,7,u>
1440 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %ymm9, %ymm12, %ymm13
1441 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],mem[6,7]
1442 ; AVX512BW-ONLY-FAST-NEXT: movb $14, %sil
1443 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1444 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm0, %zmm7 {%k2}
1445 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [11,3,11,3,11,3,11,3]
1446 ; AVX512BW-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1447 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm13
1448 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [2,10,0,3,2,10,0,3]
1449 ; AVX512BW-ONLY-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
1450 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm14
1451 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm14 {%k1}
1452 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm9 = ymm12[0],ymm9[0],ymm12[2],ymm9[2]
1453 ; AVX512BW-ONLY-FAST-NEXT: movb $28, %sil
1454 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
1455 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm9[2,3,2,3],zmm3[2,3,2,3]
1456 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [1,0,10,2,1,0,10,2]
1457 ; AVX512BW-ONLY-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1458 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1459 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm12
1460 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
1461 ; AVX512BW-ONLY-FAST-NEXT: movb $6, %cl
1462 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k2
1463 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm9 {%k2}
1464 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,9,0,3,4,9,0,3]
1465 ; AVX512BW-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
1466 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm10, %zmm12
1467 ; AVX512BW-ONLY-FAST-NEXT: movb $56, %cl
1468 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k2
1469 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm9 {%k2}
1470 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [13,5,13,5,13,5,13,5]
1471 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1472 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm10, %zmm2
1473 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm10, %zmm0
1474 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
1475 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
1476 ; AVX512BW-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
1477 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm11, %zmm1
1478 ; AVX512BW-ONLY-FAST-NEXT: movb $120, %cl
1479 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k1
1480 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
1481 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 256(%rax)
1482 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 64(%rax)
1483 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 128(%rax)
1484 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 192(%rax)
1485 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 320(%rax)
1486 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, (%rax)
1487 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 384(%rax)
1488 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
1489 ; AVX512BW-ONLY-FAST-NEXT: retq
1491 ; AVX512DQBW-SLOW-LABEL: store_i64_stride7_vf8:
1492 ; AVX512DQBW-SLOW: # %bb.0:
1493 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1494 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1495 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm0
1496 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm1
1497 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm4
1498 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm5
1499 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm9
1500 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm10
1501 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r10), %zmm3
1502 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [5,0,14,6,5,0,14,6]
1503 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
1504 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm9, %zmm10, %zmm2
1505 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,6,7,0,13,6,7]
1506 ; AVX512DQBW-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1507 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm2, %zmm6
1508 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [6,14,6,14,6,14,6,14]
1509 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1510 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm2
1511 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
1512 ; AVX512DQBW-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
1513 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1514 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm7[0,1,2,3],zmm2[4,5,6,7]
1515 ; AVX512DQBW-SLOW-NEXT: movb $-61, %sil
1516 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k1
1517 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm2 {%k1}
1518 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm7 = [11,3,11,3,11,3,11,3]
1519 ; AVX512DQBW-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1520 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1521 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [2,10,0,3,2,10,0,3]
1522 ; AVX512DQBW-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1523 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm6
1524 ; AVX512DQBW-SLOW-NEXT: movb $96, %sil
1525 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k1
1526 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
1527 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %ymm7
1528 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %ymm8
1529 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm11 = ymm8[0],ymm7[0],ymm8[2],ymm7[2]
1530 ; AVX512DQBW-SLOW-NEXT: movb $28, %sil
1531 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
1532 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 {%k2} = zmm11[2,3,2,3],zmm3[2,3,2,3]
1533 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [15,7,15,7,15,7,15,7]
1534 ; AVX512DQBW-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1535 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [9,1,9,1,9,1,9,1]
1536 ; AVX512DQBW-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1537 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm12
1538 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [0,8,0,8,0,8,0,8]
1539 ; AVX512DQBW-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1540 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm13
1541 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [4,12,0,5,4,12,0,5]
1542 ; AVX512DQBW-SLOW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
1543 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm10, %zmm9, %zmm14
1544 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm11, %zmm9
1545 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm11
1546 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [15,7,15,7]
1547 ; AVX512DQBW-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
1548 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm10
1549 ; AVX512DQBW-SLOW-NEXT: movb $24, %sil
1550 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
1551 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k2}
1552 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
1553 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [14,1,2,3,4,5,6,15]
1554 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm10, %zmm9
1555 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [1,0,10,2,1,0,10,2]
1556 ; AVX512DQBW-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1557 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm0, %zmm1, %zmm10
1558 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm11
1559 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = mem[0,1,2,3],ymm11[4,5,6,7]
1560 ; AVX512DQBW-SLOW-NEXT: movb $6, %sil
1561 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
1562 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm10 {%k2}
1563 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [4,9,0,3,4,9,0,3]
1564 ; AVX512DQBW-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1565 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm12, %zmm11
1566 ; AVX512DQBW-SLOW-NEXT: movb $56, %sil
1567 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
1568 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k2}
1569 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
1570 ; AVX512DQBW-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
1571 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm11
1572 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm12
1573 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm12 = xmm12[0],mem[0]
1574 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
1575 ; AVX512DQBW-SLOW-NEXT: movb $12, %cl
1576 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
1577 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm11 {%k2}
1578 ; AVX512DQBW-SLOW-NEXT: movb $112, %cl
1579 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
1580 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, (%r10), %zmm13, %zmm11 {%k2}
1581 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [13,5,13,5,13,5,13,5]
1582 ; AVX512DQBW-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1583 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [3,0,12,4,3,0,12,4]
1584 ; AVX512DQBW-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
1585 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm13
1586 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm12, %zmm4
1587 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm0, %zmm12
1588 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k1}
1589 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,1,12,7,0,1,12,7]
1590 ; AVX512DQBW-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
1591 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm14, %zmm4
1592 ; AVX512DQBW-SLOW-NEXT: movb $120, %cl
1593 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
1594 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4 {%k1}
1595 ; AVX512DQBW-SLOW-NEXT: movb $48, %cl
1596 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
1597 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm13 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1598 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm8[1],ymm7[1],ymm8[3],ymm7[3]
1599 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
1600 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
1601 ; AVX512DQBW-SLOW-NEXT: movb $14, %cl
1602 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
1603 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k1}
1604 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 256(%rax)
1605 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, (%rax)
1606 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 64(%rax)
1607 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 384(%rax)
1608 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 128(%rax)
1609 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
1610 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
1611 ; AVX512DQBW-SLOW-NEXT: vzeroupper
1612 ; AVX512DQBW-SLOW-NEXT: retq
1614 ; AVX512DQBW-FAST-LABEL: store_i64_stride7_vf8:
1615 ; AVX512DQBW-FAST: # %bb.0:
1616 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1617 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1618 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm0
1619 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm1
1620 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
1621 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm4
1622 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm8
1623 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm9
1624 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r10), %zmm3
1625 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [5,0,14,6,5,0,14,6]
1626 ; AVX512DQBW-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
1627 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm9, %zmm5
1628 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,6,7,0,13,6,7]
1629 ; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1630 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm5, %zmm6
1631 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [6,14,6,14,6,14,6,14]
1632 ; AVX512DQBW-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1633 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm5
1634 ; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
1635 ; AVX512DQBW-FAST-NEXT: # ymm7 = mem[0,1,0,1]
1636 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm7
1637 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm7[0,1,2,3],zmm5[4,5,6,7]
1638 ; AVX512DQBW-FAST-NEXT: movb $-61, %sil
1639 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k1
1640 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm5 {%k1}
1641 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [3,0,12,4,3,0,12,4]
1642 ; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
1643 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm2, %zmm4, %zmm6
1644 ; AVX512DQBW-FAST-NEXT: movb $48, %sil
1645 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k1
1646 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6]
1647 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r9), %ymm10
1648 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r8), %ymm11
1649 ; AVX512DQBW-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <1,3,7,u>
1650 ; AVX512DQBW-FAST-NEXT: vpermi2q %ymm10, %ymm11, %ymm7
1651 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4,5],mem[6,7]
1652 ; AVX512DQBW-FAST-NEXT: movb $14, %sil
1653 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k1
1654 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm7, %zmm0, %zmm6 {%k1}
1655 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [11,3,11,3,11,3,11,3]
1656 ; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1657 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm12
1658 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [2,10,0,3,2,10,0,3]
1659 ; AVX512DQBW-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
1660 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm7
1661 ; AVX512DQBW-FAST-NEXT: movb $96, %sil
1662 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k1
1663 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm7 {%k1}
1664 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
1665 ; AVX512DQBW-FAST-NEXT: movb $28, %sil
1666 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
1667 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 {%k2} = zmm10[2,3,2,3],zmm3[2,3,2,3]
1668 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [15,7,15,7,15,7,15,7]
1669 ; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1670 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [9,1,9,1,9,1,9,1]
1671 ; AVX512DQBW-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1672 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm11
1673 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [0,8,0,8,0,8,0,8]
1674 ; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1675 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm12
1676 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [4,12,0,5,4,12,0,5]
1677 ; AVX512DQBW-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
1678 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm9, %zmm8, %zmm13
1679 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm10, %zmm8
1680 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm2, %zmm10
1681 ; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [15,7,15,7]
1682 ; AVX512DQBW-FAST-NEXT: # ymm9 = mem[0,1,0,1]
1683 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm9
1684 ; AVX512DQBW-FAST-NEXT: movb $24, %sil
1685 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
1686 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k2}
1687 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm9 {%k1}
1688 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [14,1,2,3,4,5,6,15]
1689 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm9, %zmm8
1690 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [1,0,10,2,1,0,10,2]
1691 ; AVX512DQBW-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
1692 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm1, %zmm9
1693 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm10
1694 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm10 = mem[0,1,2,3],ymm10[4,5,6,7]
1695 ; AVX512DQBW-FAST-NEXT: movb $6, %sil
1696 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
1697 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm9 {%k2}
1698 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [4,9,0,3,4,9,0,3]
1699 ; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1700 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm11, %zmm10
1701 ; AVX512DQBW-FAST-NEXT: movb $56, %sil
1702 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
1703 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k2}
1704 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,8,0,1,0,8,0,1]
1705 ; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
1706 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm10
1707 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm11
1708 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm11[0],mem[0]
1709 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
1710 ; AVX512DQBW-FAST-NEXT: movb $12, %cl
1711 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k2
1712 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm11, %zmm0, %zmm10 {%k2}
1713 ; AVX512DQBW-FAST-NEXT: movb $112, %cl
1714 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k2
1715 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, (%r10), %zmm12, %zmm10 {%k2}
1716 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
1717 ; AVX512DQBW-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
1718 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm11, %zmm2
1719 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm11, %zmm0
1720 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k1}
1721 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
1722 ; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
1723 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm13, %zmm1
1724 ; AVX512DQBW-FAST-NEXT: movb $120, %cl
1725 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k1
1726 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
1727 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 256(%rax)
1728 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, (%rax)
1729 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, 64(%rax)
1730 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, 384(%rax)
1731 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 128(%rax)
1732 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 192(%rax)
1733 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
1734 ; AVX512DQBW-FAST-NEXT: vzeroupper
1735 ; AVX512DQBW-FAST-NEXT: retq
1736 %in.vec0 = load <8 x i64>, ptr %in.vecptr0, align 64
1737 %in.vec1 = load <8 x i64>, ptr %in.vecptr1, align 64
1738 %in.vec2 = load <8 x i64>, ptr %in.vecptr2, align 64
1739 %in.vec3 = load <8 x i64>, ptr %in.vecptr3, align 64
1740 %in.vec4 = load <8 x i64>, ptr %in.vecptr4, align 64
1741 %in.vec5 = load <8 x i64>, ptr %in.vecptr5, align 64
1742 %in.vec6 = load <8 x i64>, ptr %in.vecptr6, align 64
1743 %1 = shufflevector <8 x i64> %in.vec0, <8 x i64> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1744 %2 = shufflevector <8 x i64> %in.vec2, <8 x i64> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1745 %3 = shufflevector <8 x i64> %in.vec4, <8 x i64> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1746 %4 = shufflevector <16 x i64> %1, <16 x i64> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1747 %5 = shufflevector <8 x i64> %in.vec6, <8 x i64> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1748 %6 = shufflevector <16 x i64> %3, <16 x i64> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
1749 %7 = shufflevector <24 x i64> %6, <24 x i64> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1750 %8 = shufflevector <32 x i64> %4, <32 x i64> %7, <56 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55>
1751 %interleaved.vec = shufflevector <56 x i64> %8, <56 x i64> poison, <56 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55>
1752 store <56 x i64> %interleaved.vec, ptr %out.vec, align 64
1753 ret void
1754 }
1756 define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
1757 ; SSE-LABEL: store_i64_stride7_vf16:
1758 ; SSE: # %bb.0:
1759 ; SSE-NEXT: subq $536, %rsp # imm = 0x218
1760 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1761 ; SSE-NEXT: movapd (%rdi), %xmm2
1762 ; SSE-NEXT: movapd 16(%rdi), %xmm3
1763 ; SSE-NEXT: movapd 32(%rdi), %xmm6
1764 ; SSE-NEXT: movapd (%rsi), %xmm4
1765 ; SSE-NEXT: movapd 16(%rsi), %xmm7
1766 ; SSE-NEXT: movapd (%rdx), %xmm5
1767 ; SSE-NEXT: movapd 16(%rdx), %xmm8
1768 ; SSE-NEXT: movapd (%rcx), %xmm9
1769 ; SSE-NEXT: movapd 16(%rcx), %xmm11
1770 ; SSE-NEXT: movapd (%r8), %xmm10
1771 ; SSE-NEXT: movapd 16(%r8), %xmm12
1772 ; SSE-NEXT: movapd (%r9), %xmm13
1773 ; SSE-NEXT: movapd 16(%r9), %xmm15
1774 ; SSE-NEXT: movapd (%rax), %xmm0
1775 ; SSE-NEXT: movapd 16(%rax), %xmm1
1776 ; SSE-NEXT: movapd %xmm2, %xmm14
1777 ; SSE-NEXT: unpcklpd {{.*#+}} xmm14 = xmm14[0],xmm4[0]
1778 ; SSE-NEXT: movapd %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1779 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
1780 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1781 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm5[1]
1782 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1783 ; SSE-NEXT: unpcklpd {{.*#+}} xmm5 = xmm5[0],xmm9[0]
1784 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1785 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
1786 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1787 ; SSE-NEXT: unpcklpd {{.*#+}} xmm10 = xmm10[0],xmm13[0]
1788 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1789 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
1790 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1791 ; SSE-NEXT: movapd %xmm3, %xmm0
1792 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm7[0]
1793 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1794 ; SSE-NEXT: movsd {{.*#+}} xmm3 = xmm1[0],xmm3[1]
1795 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1796 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm8[1]
1797 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1798 ; SSE-NEXT: unpcklpd {{.*#+}} xmm8 = xmm8[0],xmm11[0]
1799 ; SSE-NEXT: movapd %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1800 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
1801 ; SSE-NEXT: movapd %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1802 ; SSE-NEXT: unpcklpd {{.*#+}} xmm12 = xmm12[0],xmm15[0]
1803 ; SSE-NEXT: movapd %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1804 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm1[1]
1805 ; SSE-NEXT: movapd %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1806 ; SSE-NEXT: movapd 32(%rsi), %xmm1
1807 ; SSE-NEXT: movapd %xmm6, %xmm0
1808 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
1809 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1810 ; SSE-NEXT: movapd 32(%rax), %xmm0
1811 ; SSE-NEXT: movsd {{.*#+}} xmm6 = xmm0[0],xmm6[1]
1812 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1813 ; SSE-NEXT: movapd 32(%rdx), %xmm2
1814 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
1815 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1816 ; SSE-NEXT: movapd 32(%rcx), %xmm3
1817 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
1818 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1819 ; SSE-NEXT: movapd 32(%r8), %xmm1
1820 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
1821 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1822 ; SSE-NEXT: movapd 32(%r9), %xmm2
1823 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1824 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1825 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1826 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1827 ; SSE-NEXT: movapd 48(%rdi), %xmm1
1828 ; SSE-NEXT: movapd 48(%rsi), %xmm2
1829 ; SSE-NEXT: movapd %xmm1, %xmm0
1830 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
1831 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1832 ; SSE-NEXT: movapd 48(%rax), %xmm0
1833 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
1834 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1835 ; SSE-NEXT: movapd 48(%rdx), %xmm1
1836 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1837 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1838 ; SSE-NEXT: movapd 48(%rcx), %xmm2
1839 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1840 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1841 ; SSE-NEXT: movapd 48(%r8), %xmm1
1842 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1843 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1844 ; SSE-NEXT: movapd 48(%r9), %xmm2
1845 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1846 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1847 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1848 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1849 ; SSE-NEXT: movapd 64(%rdi), %xmm1
1850 ; SSE-NEXT: movapd 64(%rsi), %xmm2
1851 ; SSE-NEXT: movapd %xmm1, %xmm0
1852 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
1853 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1854 ; SSE-NEXT: movapd 64(%rax), %xmm0
1855 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
1856 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1857 ; SSE-NEXT: movapd 64(%rdx), %xmm1
1858 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1859 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1860 ; SSE-NEXT: movapd 64(%rcx), %xmm2
1861 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1862 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1863 ; SSE-NEXT: movapd 64(%r8), %xmm1
1864 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1865 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1866 ; SSE-NEXT: movapd 64(%r9), %xmm2
1867 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
1868 ; SSE-NEXT: movapd %xmm1, (%rsp) # 16-byte Spill
1869 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
1870 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1871 ; SSE-NEXT: movapd 80(%rdi), %xmm1
1872 ; SSE-NEXT: movapd 80(%rsi), %xmm2
1873 ; SSE-NEXT: movapd %xmm1, %xmm0
1874 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
1875 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1876 ; SSE-NEXT: movapd 80(%rax), %xmm0
1877 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
1878 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1879 ; SSE-NEXT: movapd 80(%rdx), %xmm1
1880 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1881 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1882 ; SSE-NEXT: movapd 80(%rcx), %xmm14
1883 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm14[0]
1884 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1885 ; SSE-NEXT: movapd 80(%r8), %xmm1
1886 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm1[1]
1887 ; SSE-NEXT: movapd 80(%r9), %xmm13
1888 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm13[0]
1889 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1890 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
1891 ; SSE-NEXT: movapd 96(%rdi), %xmm15
1892 ; SSE-NEXT: movapd 96(%rsi), %xmm12
1893 ; SSE-NEXT: movapd %xmm15, %xmm0
1894 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm12[0]
1895 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1896 ; SSE-NEXT: movapd 96(%rax), %xmm3
1897 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm3[0],xmm15[1]
1898 ; SSE-NEXT: movapd 96(%rdx), %xmm11
1899 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm11[1]
1900 ; SSE-NEXT: movapd 96(%rcx), %xmm8
1901 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm8[0]
1902 ; SSE-NEXT: movapd 96(%r8), %xmm9
1903 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
1904 ; SSE-NEXT: movapd 96(%r9), %xmm6
1905 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm6[0]
1906 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm3[1]
1907 ; SSE-NEXT: movapd 112(%rdi), %xmm5
1908 ; SSE-NEXT: movapd 112(%rsi), %xmm4
1909 ; SSE-NEXT: movapd %xmm5, %xmm7
1910 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm4[0]
1911 ; SSE-NEXT: movapd 112(%rax), %xmm10
1912 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm10[0],xmm5[1]
1913 ; SSE-NEXT: movapd 112(%rdx), %xmm3
1914 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
1915 ; SSE-NEXT: movapd 112(%rcx), %xmm2
1916 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm2[0]
1917 ; SSE-NEXT: movapd 112(%r8), %xmm1
1918 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
1919 ; SSE-NEXT: movapd 112(%r9), %xmm0
1920 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1921 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
1922 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1923 ; SSE-NEXT: movapd %xmm0, 880(%rax)
1924 ; SSE-NEXT: movapd %xmm2, 864(%rax)
1925 ; SSE-NEXT: movapd %xmm4, 848(%rax)
1926 ; SSE-NEXT: movapd %xmm5, 832(%rax)
1927 ; SSE-NEXT: movapd %xmm1, 816(%rax)
1928 ; SSE-NEXT: movapd %xmm3, 800(%rax)
1929 ; SSE-NEXT: movapd %xmm7, 784(%rax)
1930 ; SSE-NEXT: movapd %xmm6, 768(%rax)
1931 ; SSE-NEXT: movapd %xmm8, 752(%rax)
1932 ; SSE-NEXT: movapd %xmm12, 736(%rax)
1933 ; SSE-NEXT: movapd %xmm15, 720(%rax)
1934 ; SSE-NEXT: movapd %xmm9, 704(%rax)
1935 ; SSE-NEXT: movapd %xmm11, 688(%rax)
1936 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1937 ; SSE-NEXT: movaps %xmm0, 672(%rax)
1938 ; SSE-NEXT: movapd %xmm13, 656(%rax)
1939 ; SSE-NEXT: movapd %xmm14, 640(%rax)
1940 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1941 ; SSE-NEXT: movaps %xmm0, 624(%rax)
1942 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1943 ; SSE-NEXT: movaps %xmm0, 608(%rax)
1944 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1945 ; SSE-NEXT: movaps %xmm0, 592(%rax)
1946 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1947 ; SSE-NEXT: movaps %xmm0, 576(%rax)
1948 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1949 ; SSE-NEXT: movaps %xmm0, 560(%rax)
1950 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1951 ; SSE-NEXT: movaps %xmm0, 544(%rax)
1952 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1953 ; SSE-NEXT: movaps %xmm0, 528(%rax)
1954 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1955 ; SSE-NEXT: movaps %xmm0, 512(%rax)
1956 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1957 ; SSE-NEXT: movaps %xmm0, 496(%rax)
1958 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1959 ; SSE-NEXT: movaps %xmm0, 480(%rax)
1960 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1961 ; SSE-NEXT: movaps %xmm0, 464(%rax)
1962 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1963 ; SSE-NEXT: movaps %xmm0, 448(%rax)
1964 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1965 ; SSE-NEXT: movaps %xmm0, 432(%rax)
1966 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1967 ; SSE-NEXT: movaps %xmm0, 416(%rax)
1968 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1969 ; SSE-NEXT: movaps %xmm0, 400(%rax)
1970 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1971 ; SSE-NEXT: movaps %xmm0, 384(%rax)
1972 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1973 ; SSE-NEXT: movaps %xmm0, 368(%rax)
1974 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1975 ; SSE-NEXT: movaps %xmm0, 352(%rax)
1976 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1977 ; SSE-NEXT: movaps %xmm0, 336(%rax)
1978 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1979 ; SSE-NEXT: movaps %xmm0, 320(%rax)
1980 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1981 ; SSE-NEXT: movaps %xmm0, 304(%rax)
1982 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1983 ; SSE-NEXT: movaps %xmm0, 288(%rax)
1984 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1985 ; SSE-NEXT: movaps %xmm0, 272(%rax)
1986 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1987 ; SSE-NEXT: movaps %xmm0, 256(%rax)
1988 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1989 ; SSE-NEXT: movaps %xmm0, 240(%rax)
1990 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1991 ; SSE-NEXT: movaps %xmm0, 224(%rax)
1992 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1993 ; SSE-NEXT: movaps %xmm0, 208(%rax)
1994 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1995 ; SSE-NEXT: movaps %xmm0, 192(%rax)
1996 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1997 ; SSE-NEXT: movaps %xmm0, 176(%rax)
1998 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1999 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2000 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2001 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2002 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2003 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2004 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2005 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2006 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2007 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2008 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2009 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2010 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2011 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2012 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2013 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2014 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2015 ; SSE-NEXT: movaps %xmm0, 32(%rax)
2016 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2017 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2018 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2019 ; SSE-NEXT: movaps %xmm0, (%rax)
2020 ; SSE-NEXT: addq $536, %rsp # imm = 0x218
2021 ; SSE-NEXT: retq
2023 ; AVX1-ONLY-LABEL: store_i64_stride7_vf16:
2024 ; AVX1-ONLY: # %bb.0:
2025 ; AVX1-ONLY-NEXT: subq $520, %rsp # imm = 0x208
2026 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2027 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm5
2028 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2029 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm0
2030 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm6
2031 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2032 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm2
2033 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2034 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm1
2035 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm2[0]
2036 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
2037 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2038 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm3
2039 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm7
2040 ; AVX1-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2041 ; AVX1-ONLY-NEXT: vmovaps 16(%rax), %xmm4
2042 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm2, %ymm2
2043 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
2044 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2045 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm2
2046 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2047 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm3
2048 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2049 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
2050 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
2051 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2052 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
2053 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2054 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm1
2055 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm5[1],ymm1[1],ymm5[3],ymm1[3]
2056 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
2057 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1],ymm0[2,3,4,5,6,7]
2058 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2059 ; AVX1-ONLY-NEXT: vmovaps 16(%rcx), %xmm0
2060 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2061 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
2062 ; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm1
2063 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm6[1],ymm1[1],ymm6[3],ymm1[3]
2064 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
2065 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
2066 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2067 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm0
2068 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm1
2069 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
2070 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rcx), %ymm2, %ymm3
2071 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm4
2072 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
2073 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
2074 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2075 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm12
2076 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2077 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm11
2078 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm2
2079 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm11[0]
2080 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm3, %ymm3
2081 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
2082 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2083 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
2084 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm1
2085 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2086 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
2087 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
2088 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2089 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm10
2090 ; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm0
2091 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm10[1],ymm0[1],ymm10[3],ymm0[3]
2092 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
2093 ; AVX1-ONLY-NEXT: vmovaps 48(%rax), %xmm1
2094 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
2095 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2096 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm9
2097 ; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm0
2098 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm9[1],ymm0[1],ymm9[3],ymm0[3]
2099 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
2100 ; AVX1-ONLY-NEXT: vmovaps 48(%rcx), %xmm7
2101 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm7[2,3,2,3]
2102 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
2103 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2104 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm8
2105 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm0
2106 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm8[0]
2107 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm2
2108 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2109 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
2110 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm6
2111 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm1
2112 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
2113 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2114 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %ymm1
2115 ; AVX1-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm2
2116 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2117 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm15
2118 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm13 = xmm15[2,3,2,3]
2119 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm13[0,1],ymm2[2,3,4,5,6,7]
2120 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2121 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
2122 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2123 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %ymm5
2124 ; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm0
2125 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm5[1],ymm0[1],ymm5[3],ymm0[3]
2126 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
2127 ; AVX1-ONLY-NEXT: vmovaps 80(%rax), %xmm1
2128 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
2129 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2130 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %ymm4
2131 ; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm0
2132 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm4[1],ymm0[1],ymm4[3],ymm0[3]
2133 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm0[0,1,2,3,4,5],mem[6,7]
2134 ; AVX1-ONLY-NEXT: vmovaps 80(%rcx), %xmm3
2135 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm3[2,3,2,3]
2136 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm13[2,3,4,5,6,7]
2137 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2138 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm13
2139 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm14
2140 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm0 = xmm14[0],xmm13[0]
2141 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rcx), %ymm0, %ymm2
2142 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm1
2143 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2144 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2],ymm2[2]
2145 ; AVX1-ONLY-NEXT: vmovupd %ymm0, (%rsp) # 32-byte Spill
2146 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm0
2147 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm2
2148 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm14 = xmm2[0],mem[0]
2149 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rax), %ymm14, %ymm14
2150 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1,2,3,4,5],ymm0[6,7]
2151 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2152 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm13[1],xmm1[1]
2153 ; AVX1-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm1
2154 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2155 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
2156 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm0[0,1,2,3,4,5],ymm1[6,7]
2157 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm0
2158 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
2159 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
2160 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2161 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
2162 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
2163 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2164 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm0
2165 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
2166 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
2167 ; AVX1-ONLY-NEXT: vmovaps 16(%rdx), %xmm1
2168 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
2169 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
2170 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2171 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
2172 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
2173 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm11[1],xmm12[1]
2174 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2175 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm0
2176 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
2177 ; AVX1-ONLY-NEXT: vmovaps 48(%rdx), %xmm9
2178 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm9[0],xmm7[0]
2179 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm0[4,5,6,7]
2180 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
2181 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
2182 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm5 = xmm8[1],xmm6[1]
2183 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm0[4,5,6,7]
2184 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %ymm0
2185 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
2186 ; AVX1-ONLY-NEXT: vmovaps 80(%rdx), %xmm4
2187 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm3[0]
2188 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
2189 ; AVX1-ONLY-NEXT: vmovapd 96(%rdi), %ymm3
2190 ; AVX1-ONLY-NEXT: vmovapd 96(%rsi), %ymm4
2191 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[2],ymm4[2]
2192 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
2193 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm6[0,1],ymm3[2,3]
2194 ; AVX1-ONLY-NEXT: vmovapd 112(%rdi), %xmm6
2195 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0,1],mem[2,3]
2196 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[0,0,3,2]
2197 ; AVX1-ONLY-NEXT: vmovapd 96(%rax), %ymm8
2198 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm8[2,3],ymm4[2,3]
2199 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2],ymm6[3]
2200 ; AVX1-ONLY-NEXT: vmovapd 112(%rcx), %xmm6
2201 ; AVX1-ONLY-NEXT: vmovapd 112(%rdx), %xmm9
2202 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm9 = xmm9[0],xmm6[0]
2203 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm9 = ymm9[0,1],mem[2,3]
2204 ; AVX1-ONLY-NEXT: vbroadcastsd 112(%r9), %ymm10
2205 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3]
2206 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],mem[1]
2207 ; AVX1-ONLY-NEXT: vbroadcastsd 120(%r9), %ymm10
2208 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0,1],ymm10[2,3]
2209 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm3[0],ymm8[1],ymm3[2,3]
2210 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0,1,2],ymm8[3]
2211 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2212 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm8 = xmm8[0],xmm15[0]
2213 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2214 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10, %xmm10 # 16-byte Folded Reload
2215 ; AVX1-ONLY-NEXT: # xmm10 = xmm10[0],mem[0]
2216 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2217 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm11
2218 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm11 = xmm11[0],mem[0]
2219 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm12
2220 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm12 = xmm12[0],mem[0]
2221 ; AVX1-ONLY-NEXT: vmovaps %xmm12, 16(%rax)
2222 ; AVX1-ONLY-NEXT: vmovaps %xmm10, (%rax)
2223 ; AVX1-ONLY-NEXT: vmovaps %xmm11, 464(%rax)
2224 ; AVX1-ONLY-NEXT: vmovaps %xmm8, 448(%rax)
2225 ; AVX1-ONLY-NEXT: vmovapd %ymm4, 832(%rax)
2226 ; AVX1-ONLY-NEXT: vmovapd %ymm3, 768(%rax)
2227 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
2228 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 544(%rax)
2229 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 352(%rax)
2230 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 320(%rax)
2231 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
2232 ; AVX1-ONLY-NEXT: vmovaps %ymm14, 96(%rax)
2233 ; AVX1-ONLY-NEXT: vmovapd %ymm6, 864(%rax)
2234 ; AVX1-ONLY-NEXT: vmovapd %ymm9, 800(%rax)
2235 ; AVX1-ONLY-NEXT: vmovaps %ymm13, 736(%rax)
2236 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2237 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
2238 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2239 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
2240 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2241 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
2242 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2243 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
2244 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2245 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
2246 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2247 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
2248 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2249 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
2250 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2251 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
2252 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2253 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
2254 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2255 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
2256 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2257 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
2258 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2259 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
2260 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2261 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
2262 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2263 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
2264 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2265 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
2266 ; AVX1-ONLY-NEXT: addq $520, %rsp # imm = 0x208
2267 ; AVX1-ONLY-NEXT: vzeroupper
2268 ; AVX1-ONLY-NEXT: retq
2270 ; AVX2-ONLY-LABEL: store_i64_stride7_vf16:
2271 ; AVX2-ONLY: # %bb.0:
2272 ; AVX2-ONLY-NEXT: subq $552, %rsp # imm = 0x228
2273 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2274 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm8
2275 ; AVX2-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2276 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm2
2277 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm7
2278 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm14
2279 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm10
2280 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm15
2281 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm4
2282 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm3
2283 ; AVX2-ONLY-NEXT: vmovaps 16(%rax), %xmm0
2284 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm1
2285 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm7[1],ymm14[1],ymm7[3],ymm14[3]
2286 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,3,3]
2287 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm4[6,7]
2288 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm5[2,3,4,5,6,7]
2289 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2290 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm0
2291 ; AVX2-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2292 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2293 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm5
2294 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm5[0],mem[0]
2295 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm6, %ymm6
2296 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3,4,5],ymm0[6,7]
2297 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2298 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm6
2299 ; AVX2-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2300 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
2301 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm6[1]
2302 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm6
2303 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
2304 ; AVX2-ONLY-NEXT: vmovaps 48(%rax), %xmm6
2305 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
2306 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm5[6,7]
2307 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2308 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm10[1],ymm2[3],ymm10[3]
2309 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
2310 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
2311 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1],ymm0[2,3,4,5,6,7]
2312 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2313 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm8[1],ymm15[1],ymm8[3],ymm15[3]
2314 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm0[0,2,3,3]
2315 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm0
2316 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm0[6,7]
2317 ; AVX2-ONLY-NEXT: vmovaps 80(%rax), %xmm6
2318 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3,4,5,6,7]
2319 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2320 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm5
2321 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm5[0],mem[0]
2322 ; AVX2-ONLY-NEXT: vinsertf128 $1, 96(%rax), %ymm6, %ymm6
2323 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm8
2324 ; AVX2-ONLY-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2325 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
2326 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm8[6,7]
2327 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2328 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm8
2329 ; AVX2-ONLY-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2330 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
2331 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm8[1]
2332 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm8
2333 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
2334 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
2335 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
2336 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2337 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm12
2338 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm11
2339 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm12[1],ymm11[1],ymm12[3],ymm11[3]
2340 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,3,3]
2341 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm6
2342 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2343 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm6[6,7]
2344 ; AVX2-ONLY-NEXT: vmovaps 112(%rax), %xmm6
2345 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3,4,5,6,7]
2346 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2347 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm5
2348 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm5, %ymm5
2349 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm6
2350 ; AVX2-ONLY-NEXT: vmovups %ymm6, (%rsp) # 32-byte Spill
2351 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm6, %ymm6
2352 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm6[0],ymm5[0],ymm6[2],ymm5[2]
2353 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2354 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm5
2355 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm4[0,1,2,3],ymm5[4,5,6,7]
2356 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
2357 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3,4,5,6,7]
2358 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm13
2359 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm6
2360 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm6[6,7]
2361 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2362 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm7[0],ymm14[0],ymm7[2],ymm14[2]
2363 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm9
2364 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
2365 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm9[1]
2366 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3],ymm5[4,5,6,7]
2367 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2368 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],mem[0],ymm4[2],mem[2]
2369 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm8
2370 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm6
2371 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm8[0],ymm6[0],ymm8[2],ymm6[2]
2372 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],ymm5[2,3]
2373 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2374 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm10[0],ymm2[2],ymm10[2]
2375 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
2376 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
2377 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
2378 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2379 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm3[0],mem[0],ymm3[2],mem[2]
2380 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm10
2381 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm5
2382 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm10[0],ymm5[0],ymm10[2],ymm5[2]
2383 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm1[2,3],ymm2[2,3]
2384 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2385 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm1
2386 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rcx), %ymm1, %ymm1
2387 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm7
2388 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm7, %ymm2
2389 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
2390 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2391 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm1
2392 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2393 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
2394 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
2395 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm4
2396 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm2
2397 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
2398 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2399 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2400 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm15[0],ymm1[2],ymm15[2]
2401 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm3
2402 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm14 = mem[0,0]
2403 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm14[1],xmm3[1]
2404 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm14[0,1,2,3],ymm1[4,5,6,7]
2405 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2406 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm14 = ymm0[0],mem[0],ymm0[2],mem[2]
2407 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm2
2408 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm1
2409 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm15 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
2410 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm14 = ymm14[2,3],ymm15[2,3]
2411 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm12[0],ymm11[0],ymm12[2],ymm11[2]
2412 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm12 = mem[0,0]
2413 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm12[0,1,2,3],ymm11[4,5,6,7]
2414 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%rcx), %ymm12
2415 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0,1],mem[2,3]
2416 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%r9), %ymm15
2417 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm12[0,1,2,3],ymm15[4,5,6,7]
2418 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %ymm0
2419 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm11[0,1],ymm0[2,3],ymm11[4,5,6,7]
2420 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm15[0,1,2,3,4,5],ymm0[6,7]
2421 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm13[0],mem[0]
2422 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rsp), %ymm0, %ymm0 # 16-byte Folded Reload
2423 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm9, %ymm9
2424 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5],ymm0[6,7]
2425 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm8[1],ymm6[1],ymm8[3],ymm6[3]
2426 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,2,3,3]
2427 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
2428 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%rcx), %ymm8
2429 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1],ymm6[2,3,4,5,6,7]
2430 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2431 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm6[0],mem[0]
2432 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 16-byte Folded Reload
2433 ; AVX2-ONLY-NEXT: vbroadcastsd 32(%rcx), %ymm9
2434 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm9[6,7]
2435 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm10[1],ymm5[1],ymm10[3],ymm5[3]
2436 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,3,3]
2437 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
2438 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%rcx), %ymm9
2439 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm9[0,1],ymm5[2,3,4,5,6,7]
2440 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm4 = xmm4[0],mem[0]
2441 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm4, %ymm4
2442 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm3, %ymm3
2443 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5],ymm4[6,7]
2444 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
2445 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
2446 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
2447 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%rcx), %ymm2
2448 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
2449 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2450 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],mem[0]
2451 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 16-byte Folded Reload
2452 ; AVX2-ONLY-NEXT: vbroadcastsd 96(%rcx), %ymm4
2453 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
2454 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2455 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],mem[0],ymm4[2],mem[2]
2456 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3],mem[2,3]
2457 ; AVX2-ONLY-NEXT: vbroadcastsd 112(%r9), %ymm7
2458 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm7[6,7]
2459 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2460 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 800(%rax)
2461 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 768(%rax)
2462 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 576(%rax)
2463 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2464 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 544(%rax)
2465 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2466 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 512(%rax)
2467 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2468 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 448(%rax)
2469 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2470 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 352(%rax)
2471 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2472 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 320(%rax)
2473 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2474 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 128(%rax)
2475 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2476 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 96(%rax)
2477 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2478 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 64(%rax)
2479 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2480 ; AVX2-ONLY-NEXT: vmovaps %ymm4, (%rax)
2481 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2482 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 832(%rax)
2483 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2484 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 736(%rax)
2485 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2486 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 704(%rax)
2487 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 672(%rax)
2488 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 640(%rax)
2489 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2490 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 608(%rax)
2491 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 480(%rax)
2492 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 416(%rax)
2493 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2494 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 384(%rax)
2495 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2496 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 288(%rax)
2497 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2498 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 256(%rax)
2499 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 224(%rax)
2500 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 192(%rax)
2501 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2502 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 160(%rax)
2503 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
2504 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 864(%rax)
2505 ; AVX2-ONLY-NEXT: addq $552, %rsp # imm = 0x228
2506 ; AVX2-ONLY-NEXT: vzeroupper
2507 ; AVX2-ONLY-NEXT: retq
2509 ; AVX512F-ONLY-SLOW-LABEL: store_i64_stride7_vf16:
2510 ; AVX512F-ONLY-SLOW: # %bb.0:
2511 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2512 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm4
2513 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
2514 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm5
2515 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm7
2516 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
2517 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm9
2518 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm2
2519 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm3
2520 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm11
2521 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
2522 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2523 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
2524 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm12
2525 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm12
2526 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [6,14,6,14]
2527 ; AVX512F-ONLY-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
2528 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
2529 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm16 = [15,7,15,7]
2530 ; AVX512F-ONLY-SLOW-NEXT: # ymm16 = mem[0,1,2,3,0,1,2,3]
2531 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
2532 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm16, %zmm10
2533 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm16
2534 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13
2535 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm0, %zmm13
2536 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm13[0,1,2,3],zmm12[4,5,6,7]
2537 ; AVX512F-ONLY-SLOW-NEXT: movb $64, %sil
2538 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k1
2539 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm23 {%k1}
2540 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
2541 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm24
2542 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm25
2543 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm22
2544 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm30
2545 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,8,0,1,0,8,0,1]
2546 ; AVX512F-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
2547 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [0,8,0,8,0,8,0,8]
2548 ; AVX512F-ONLY-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2549 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
2550 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm21
2551 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm11, %zmm20
2552 ; AVX512F-ONLY-SLOW-NEXT: movb $96, %sil
2553 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k1
2554 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [4,12,0,5,4,12,0,5]
2555 ; AVX512F-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
2556 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm18
2557 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [15,7,15,7,15,7,15,7]
2558 ; AVX512F-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2559 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm26
2560 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm17, %zmm26
2561 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm19
2562 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm17, %zmm19
2563 ; AVX512F-ONLY-SLOW-NEXT: movb $24, %sil
2564 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
2565 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm3, %zmm9, %zmm17
2566 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm16 {%k2}
2567 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = <u,1,2,3,4,15,u,u>
2568 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm16, %zmm27
2569 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm11
2570 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,1,12,7,0,1,12,7]
2571 ; AVX512F-ONLY-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
2572 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm29, %zmm11
2573 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm31 = [11,3,11,3,11,3,11,3]
2574 ; AVX512F-ONLY-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2575 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm15
2576 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [2,10,0,3,2,10,0,3]
2577 ; AVX512F-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
2578 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm16
2579 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm16
2580 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm16 {%k1}
2581 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [9,1,9,1,9,1,9,1]
2582 ; AVX512F-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2583 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
2584 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
2585 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm25, %zmm0
2586 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm14, %zmm13
2587 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm28, %zmm14
2588 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm15
2589 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2590 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm28, %zmm18
2591 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,1,2,3,4,5,15,u>
2592 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm27, %zmm25
2593 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <13,u,2,3,4,5,6,14>
2594 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm23, %zmm1
2595 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %ymm24
2596 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm23 = ymm24[0],ymm15[0],ymm24[2],ymm15[2]
2597 ; AVX512F-ONLY-SLOW-NEXT: movb $28, %sil
2598 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
2599 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k3} = zmm23[2,3,2,3],zmm30[2,3,2,3]
2600 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
2601 ; AVX512F-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
2602 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm26, %zmm22, %zmm28
2603 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,13,6,7,0,13,6,7]
2604 ; AVX512F-ONLY-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
2605 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm27
2606 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm15
2607 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm29, %zmm13
2608 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %ymm26
2609 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm31
2610 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm17
2611 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
2612 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm23 = ymm26[0],ymm15[0],ymm26[2],ymm15[2]
2613 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm17 {%k3} = zmm23[2,3,2,3],zmm22[2,3,2,3]
2614 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [4,9,0,3,4,9,0,3]
2615 ; AVX512F-ONLY-SLOW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
2616 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm14
2617 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [13,5,13,5,13,5,13,5]
2618 ; AVX512F-ONLY-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2619 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm23, %zmm18
2620 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [14,1,2,3,4,5,6,15]
2621 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm30, %zmm25, %zmm22
2622 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,13,2,3,4,5,6,7]
2623 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm30, %zmm1, %zmm23
2624 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm1
2625 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm1
2626 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
2627 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm0
2628 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [3,0,12,4,3,0,12,4]
2629 ; AVX512F-ONLY-SLOW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
2630 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm25, %zmm3
2631 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm30
2632 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm30
2633 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm2
2634 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
2635 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm12, %zmm8
2636 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
2637 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm31 = [1,0,10,2,1,0,10,2]
2638 ; AVX512F-ONLY-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3]
2639 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
2640 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm9
2641 ; AVX512F-ONLY-SLOW-NEXT: movb $48, %sil
2642 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
2643 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm7[0],zmm6[2],zmm7[2],zmm6[4],zmm7[4],zmm6[6],zmm7[6]
2644 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm6
2645 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
2646 ; AVX512F-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
2647 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm7
2648 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm29
2649 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm31
2650 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm4[0],zmm5[0],zmm4[2],zmm5[2],zmm4[4],zmm5[4],zmm4[6],zmm5[6]
2651 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm4
2652 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
2653 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
2654 ; AVX512F-ONLY-SLOW-NEXT: movb $12, %sil
2655 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
2656 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm8 {%k3}
2657 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm21, %zmm4
2658 ; AVX512F-ONLY-SLOW-NEXT: movb $112, %sil
2659 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
2660 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8 {%k4}
2661 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm4
2662 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
2663 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
2664 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm12 {%k3}
2665 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm20, %zmm4
2666 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k4}
2667 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k1}
2668 ; AVX512F-ONLY-SLOW-NEXT: movb $120, %sil
2669 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
2670 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
2671 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
2672 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
2673 ; AVX512F-ONLY-SLOW-NEXT: movb $6, %sil
2674 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
2675 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm9 {%k4}
2676 ; AVX512F-ONLY-SLOW-NEXT: movb $56, %sil
2677 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k5
2678 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm9 {%k5}
2679 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
2680 ; AVX512F-ONLY-SLOW-NEXT: movb $-31, %sil
2681 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
2682 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, %zmm10 {%k2}
2683 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm7[0,1,2,3],zmm0[4,5,6,7]
2684 ; AVX512F-ONLY-SLOW-NEXT: movb $-61, %sil
2685 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
2686 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1 {%k2}
2687 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm29 {%k1}
2688 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm13 {%k3}
2689 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm4
2690 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = mem[0,1,2,3],ymm4[4,5,6,7]
2691 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm31 {%k4}
2692 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm31 {%k5}
2693 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm24, %ymm4 # 32-byte Folded Reload
2694 ; AVX512F-ONLY-SLOW-NEXT: # ymm4 = ymm24[1],mem[1],ymm24[3],mem[3]
2695 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,3,3]
2696 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
2697 ; AVX512F-ONLY-SLOW-NEXT: movb $14, %cl
2698 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
2699 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm3 {%k1}
2700 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm26[1],ymm15[1],ymm26[3],ymm15[3]
2701 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
2702 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
2703 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
2704 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2705 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 64(%rax)
2706 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 128(%rax)
2707 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 256(%rax)
2708 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 320(%rax)
2709 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 384(%rax)
2710 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 512(%rax)
2711 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 576(%rax)
2712 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 704(%rax)
2713 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 192(%rax)
2714 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 448(%rax)
2715 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 640(%rax)
2716 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
2717 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 768(%rax)
2718 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 832(%rax)
2719 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
2720 ; AVX512F-ONLY-SLOW-NEXT: retq
2722 ; AVX512F-ONLY-FAST-LABEL: store_i64_stride7_vf16:
2723 ; AVX512F-ONLY-FAST: # %bb.0:
2724 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2725 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm12
2726 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
2727 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm0
2728 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm5
2729 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm1
2730 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
2731 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm17
2732 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
2733 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm30
2734 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [6,14,6,14,6,14,6,14]
2735 ; AVX512F-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2736 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm18
2737 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm20, %zmm18
2738 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm21 = [6,14,6,14]
2739 ; AVX512F-ONLY-FAST-NEXT: # ymm21 = mem[0,1,2,3,0,1,2,3]
2740 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm19
2741 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm19
2742 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm11
2743 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm10
2744 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm15
2745 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm14
2746 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm31
2747 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,0,1,0,8,0,1]
2748 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
2749 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm2
2750 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm2
2751 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm13
2752 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %xmm16
2753 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm13 = xmm13[0],mem[0]
2754 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
2755 ; AVX512F-ONLY-FAST-NEXT: movb $12, %sil
2756 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k1
2757 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm0, %zmm2 {%k1}
2758 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [0,8,0,8,0,8,0,8]
2759 ; AVX512F-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2760 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
2761 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm13, %zmm22
2762 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm22, %zmm22
2763 ; AVX512F-ONLY-FAST-NEXT: movb $112, %sil
2764 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
2765 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm2 {%k2}
2766 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm3
2767 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm16 = xmm16[0],mem[0]
2768 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm16, %ymm0, %ymm16
2769 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm16, %zmm0, %zmm3 {%k1}
2770 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm30, %zmm13
2771 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm13, %zmm13
2772 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3 {%k2}
2773 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [13,5,13,5,13,5,13,5]
2774 ; AVX512F-ONLY-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2775 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
2776 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm24, %zmm13
2777 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm26
2778 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm24, %zmm26
2779 ; AVX512F-ONLY-FAST-NEXT: movb $96, %sil
2780 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k1
2781 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm26 {%k1}
2782 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [4,12,0,5,4,12,0,5]
2783 ; AVX512F-ONLY-FAST-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
2784 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm13
2785 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm16, %zmm13
2786 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,1,12,7,0,1,12,7]
2787 ; AVX512F-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
2788 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm13
2789 ; AVX512F-ONLY-FAST-NEXT: movb $120, %sil
2790 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [15,7,15,7,15,7,15,7]
2791 ; AVX512F-ONLY-FAST-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2792 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
2793 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm22
2794 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
2795 ; AVX512F-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
2796 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm14, %zmm28
2797 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
2798 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm29
2799 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm25 = [15,7,15,7]
2800 ; AVX512F-ONLY-FAST-NEXT: # ymm25 = mem[0,1,2,3,0,1,2,3]
2801 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm22
2802 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm22
2803 ; AVX512F-ONLY-FAST-NEXT: movb $24, %dil
2804 ; AVX512F-ONLY-FAST-NEXT: kmovw %edi, %k2
2805 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm22 {%k2}
2806 ; AVX512F-ONLY-FAST-NEXT: movb $-31, %dil
2807 ; AVX512F-ONLY-FAST-NEXT: kmovw %edi, %k3
2808 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm22 {%k3}
2809 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,0,14,6,5,0,14,6]
2810 ; AVX512F-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
2811 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm15, %zmm28
2812 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,13,6,7,0,13,6,7]
2813 ; AVX512F-ONLY-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
2814 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm28, %zmm29
2815 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm17, %zmm1, %zmm20
2816 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm21
2817 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm21[0,1,2,3],zmm20[4,5,6,7]
2818 ; AVX512F-ONLY-FAST-NEXT: movb $-61, %dil
2819 ; AVX512F-ONLY-FAST-NEXT: kmovw %edi, %k3
2820 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm20 {%k3}
2821 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
2822 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm13 {%k3}
2823 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm26 = [3,0,12,4,3,0,12,4]
2824 ; AVX512F-ONLY-FAST-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3]
2825 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm21
2826 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm26, %zmm21
2827 ; AVX512F-ONLY-FAST-NEXT: movb $48, %sil
2828 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm28
2829 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm24, %zmm28
2830 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm24
2831 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm24 {%k1}
2832 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm15, %zmm11, %zmm16
2833 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm27, %zmm16
2834 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm16 {%k3}
2835 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
2836 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm4[0],zmm5[0],zmm4[2],zmm5[2],zmm4[4],zmm5[4],zmm4[6],zmm5[6]
2837 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %ymm24
2838 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %ymm27
2839 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm28
2840 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [1,3,7,7]
2841 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm28, %ymm8
2842 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm27, %ymm6, %ymm8
2843 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
2844 ; AVX512F-ONLY-FAST-NEXT: movb $14, %sil
2845 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm17, %zmm26
2846 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm26 {%k3} = zmm12[0],zmm0[0],zmm12[2],zmm0[2],zmm12[4],zmm0[4],zmm12[6],zmm0[6]
2847 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
2848 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm21 {%k3}
2849 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm8
2850 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %ymm24, %ymm8, %ymm6
2851 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
2852 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm26 {%k3}
2853 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm28[0],ymm27[0],ymm28[2],ymm27[2]
2854 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm18 = zmm19[0,1,2,3],zmm18[4,5,6,7]
2855 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [11,3,11,3,11,3,11,3]
2856 ; AVX512F-ONLY-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2857 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
2858 ; AVX512F-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
2859 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm27, %zmm1
2860 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm17
2861 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm27, %zmm17
2862 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm27
2863 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
2864 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm17 {%k1}
2865 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm19
2866 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1 {%k1}
2867 ; AVX512F-ONLY-FAST-NEXT: movb $28, %al
2868 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
2869 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 {%k1} = zmm6[2,3,2,3],zmm31[2,3,2,3]
2870 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,0,10,2,1,0,10,2]
2871 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2872 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm0
2873 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm12
2874 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm12
2875 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm8[0],ymm24[0],ymm8[2],ymm24[2]
2876 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k1} = zmm6[2,3,2,3],zmm14[2,3,2,3]
2877 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm6
2878 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
2879 ; AVX512F-ONLY-FAST-NEXT: movb $6, %al
2880 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
2881 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
2882 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [9,1,9,1,9,1,9,1]
2883 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2884 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm11
2885 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm8
2886 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm6, %zmm8
2887 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [4,9,0,3,4,9,0,3]
2888 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2889 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm6, %zmm11
2890 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm6, %zmm8
2891 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm6
2892 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
2893 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
2894 ; AVX512F-ONLY-FAST-NEXT: movb $64, %al
2895 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
2896 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm18 {%k1}
2897 ; AVX512F-ONLY-FAST-NEXT: movb $56, %al
2898 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
2899 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm12 {%k1}
2900 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
2901 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm23, %zmm7
2902 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm25, %zmm4
2903 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4 {%k2}
2904 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,15,u,u>
2905 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm30, %zmm4, %zmm5
2906 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,1,2,3,4,5,15,u>
2907 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm5, %zmm4
2908 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <13,u,2,3,4,5,6,14>
2909 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm18, %zmm5
2910 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [14,1,2,3,4,5,6,15]
2911 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm31, %zmm4, %zmm6
2912 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,13,2,3,4,5,6,7]
2913 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm31, %zmm5, %zmm4
2914 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2915 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
2916 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
2917 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 192(%rax)
2918 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 256(%rax)
2919 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 320(%rax)
2920 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 384(%rax)
2921 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 512(%rax)
2922 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 576(%rax)
2923 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
2924 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
2925 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 448(%rax)
2926 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
2927 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 768(%rax)
2928 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 832(%rax)
2929 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
2930 ; AVX512F-ONLY-FAST-NEXT: retq
;
2932 ; AVX512DQ-SLOW-LABEL: store_i64_stride7_vf16:
2933 ; AVX512DQ-SLOW: # %bb.0:
2934 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2935 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm4
2936 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
2937 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm13
2938 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm7
2939 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
2940 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm9
2941 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm2
2942 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm3
2943 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm11
2944 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
2945 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2946 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
2947 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm12
2948 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm12
2949 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
2950 ; AVX512DQ-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
2951 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
2952 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm16 = [15,7,15,7]
2953 ; AVX512DQ-SLOW-NEXT: # ymm16 = mem[0,1,0,1]
2954 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
2955 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm16, %zmm10
2956 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm16
2957 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
2958 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm14
2959 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm14[0,1,2,3],zmm12[4,5,6,7]
2960 ; AVX512DQ-SLOW-NEXT: movb $64, %sil
2961 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k1
2962 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm23 {%k1}
2963 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
2964 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm24
2965 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm25
2966 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rax), %zmm22
2967 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rax), %zmm28
2968 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [0,8,0,8,0,8,0,8]
2969 ; AVX512DQ-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2970 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
2971 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm17, %zmm21
2972 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [13,5,13,5,13,5,13,5]
2973 ; AVX512DQ-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2974 ; AVX512DQ-SLOW-NEXT: movb $96, %sil
2975 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k1
2976 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,12,0,5,4,12,0,5]
2977 ; AVX512DQ-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
2978 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm20
2979 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm24, %zmm11, %zmm17
2980 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [15,7,15,7,15,7,15,7]
2981 ; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2982 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm26
2983 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm18, %zmm26
2984 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm19
2985 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm18, %zmm19
2986 ; AVX512DQ-SLOW-NEXT: movb $24, %sil
2987 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
2988 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm9, %zmm18
2989 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm16 {%k2}
2990 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = <u,1,2,3,4,15,u,u>
2991 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm16, %zmm27
2992 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm12, %zmm11
2993 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm30 = [0,1,12,7,0,1,12,7]
2994 ; AVX512DQ-SLOW-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3]
2995 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm30, %zmm11
2996 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm31 = [11,3,11,3,11,3,11,3]
2997 ; AVX512DQ-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
2998 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm0
2999 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [2,10,0,3,2,10,0,3]
3000 ; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3001 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm16
3002 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm16
3003 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k1}
3004 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [9,1,9,1,9,1,9,1]
3005 ; AVX512DQ-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3006 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
3007 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3008 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm14, %zmm25, %zmm0
3009 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm25, %zmm14, %zmm12
3010 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm29, %zmm14
3011 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %ymm5
3012 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3013 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm29, %zmm20
3014 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,1,2,3,4,5,15,u>
3015 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm24, %zmm27, %zmm25
3016 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <13,u,2,3,4,5,6,14>
3017 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm24, %zmm23, %zmm1
3018 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %ymm23
3019 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm24 = ymm23[0],ymm5[0],ymm23[2],ymm5[2]
3020 ; AVX512DQ-SLOW-NEXT: movb $28, %sil
3021 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
3022 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k3} = zmm24[2,3,2,3],zmm28[2,3,2,3]
3023 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [6,13,14,7,6,13,14,7]
3024 ; AVX512DQ-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
3025 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm26, %zmm22, %zmm29
3026 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,13,6,7,0,13,6,7]
3027 ; AVX512DQ-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3028 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm27
3029 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm5
3030 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm30, %zmm12
3031 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %ymm26
3032 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm31
3033 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm18
3034 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18 {%k1}
3035 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm24 = ymm26[0],ymm5[0],ymm26[2],ymm5[2]
3036 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm18 {%k3} = zmm24[2,3,2,3],zmm22[2,3,2,3]
3037 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [4,9,0,3,4,9,0,3]
3038 ; AVX512DQ-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3039 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm24, %zmm14
3040 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
3041 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm0
3042 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm24, %zmm20
3043 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [14,1,2,3,4,5,6,15]
3044 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm28, %zmm25, %zmm22
3045 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm24 = [0,13,2,3,4,5,6,7]
3046 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm28, %zmm1, %zmm24
3047 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [1,0,10,2,1,0,10,2]
3048 ; AVX512DQ-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3049 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
3050 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm25
3051 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [3,0,12,4,3,0,12,4]
3052 ; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
3053 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
3054 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm31
3055 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm31
3056 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm2
3057 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
3058 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm28, %zmm9
3059 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm30 = [0,8,0,1,0,8,0,1]
3060 ; AVX512DQ-SLOW-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3]
3061 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
3062 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm30, %zmm8
3063 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm30
3064 ; AVX512DQ-SLOW-NEXT: movb $48, %sil
3065 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
3066 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm7[0],zmm6[2],zmm7[2],zmm6[4],zmm7[4],zmm6[6],zmm7[6]
3067 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm6
3068 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
3069 ; AVX512DQ-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
3070 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm7
3071 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm15
3072 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm13, %zmm28
3073 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm4[0],zmm13[0],zmm4[2],zmm13[2],zmm4[4],zmm13[4],zmm4[6],zmm13[6]
3074 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm1
3075 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
3076 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3077 ; AVX512DQ-SLOW-NEXT: movb $12, %sil
3078 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k5
3079 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k5}
3080 ; AVX512DQ-SLOW-NEXT: movb $112, %sil
3081 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k7
3082 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm21, %zmm8 {%k7}
3083 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
3084 ; AVX512DQ-SLOW-NEXT: movb $120, %sil
3085 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
3086 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
3087 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
3088 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
3089 ; AVX512DQ-SLOW-NEXT: movb $6, %sil
3090 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
3091 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm9 {%k4}
3092 ; AVX512DQ-SLOW-NEXT: movb $56, %sil
3093 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k6
3094 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm9 {%k6}
3095 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
3096 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
3097 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3098 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k5}
3099 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm17, %zmm30 {%k7}
3100 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
3101 ; AVX512DQ-SLOW-NEXT: movb $-31, %sil
3102 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
3103 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm10 {%k2}
3104 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm7[0,1,2,3],zmm25[4,5,6,7]
3105 ; AVX512DQ-SLOW-NEXT: movb $-61, %sil
3106 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
3107 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0 {%k2}
3108 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm15 {%k1}
3109 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, %zmm12 {%k3}
3110 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
3111 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
3112 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm28 {%k4}
3113 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm28 {%k6}
3114 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm23, %ymm1 # 32-byte Folded Reload
3115 ; AVX512DQ-SLOW-NEXT: # ymm1 = ymm23[1],mem[1],ymm23[3],mem[3]
3116 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,3,3]
3117 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
3118 ; AVX512DQ-SLOW-NEXT: movb $14, %cl
3119 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
3120 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k1}
3121 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm26[1],ymm5[1],ymm26[3],ymm5[3]
3122 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,3,3]
3123 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
3124 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k1}
3125 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3126 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, 64(%rax)
3127 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, 128(%rax)
3128 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 192(%rax)
3129 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
3130 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 320(%rax)
3131 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 384(%rax)
3132 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 448(%rax)
3133 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 512(%rax)
3134 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 576(%rax)
3135 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 640(%rax)
3136 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 704(%rax)
3137 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
3138 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, 768(%rax)
3139 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, 832(%rax)
3140 ; AVX512DQ-SLOW-NEXT: vzeroupper
3141 ; AVX512DQ-SLOW-NEXT: retq
;
3143 ; AVX512DQ-FAST-LABEL: store_i64_stride7_vf16:
3144 ; AVX512DQ-FAST: # %bb.0:
3145 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3146 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm13
3147 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm3
3148 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm0
3149 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm5
3150 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm1
3151 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
3152 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm16
3153 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
3154 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm30
3155 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [6,14,6,14,6,14,6,14]
3156 ; AVX512DQ-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3157 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm17
3158 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm20, %zmm17
3159 ; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm21 = [6,14,6,14]
3160 ; AVX512DQ-FAST-NEXT: # ymm21 = mem[0,1,0,1]
3161 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm18
3162 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm18
3163 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm11
3164 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm10
3165 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm15
3166 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm14
3167 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rax), %zmm31
3168 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,8,0,1,0,8,0,1]
3169 ; AVX512DQ-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
3170 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
3171 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm2
3172 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm12
3173 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %xmm22
3174 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm12 = xmm12[0],mem[0]
3175 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
3176 ; AVX512DQ-FAST-NEXT: movb $12, %sil
3177 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
3178 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm2 {%k2}
3179 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [0,8,0,8,0,8,0,8]
3180 ; AVX512DQ-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3181 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm12
3182 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm24, %zmm12
3183 ; AVX512DQ-FAST-NEXT: movb $112, %sil
3184 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
3185 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm12, %zmm2 {%k3}
3186 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [13,5,13,5,13,5,13,5]
3187 ; AVX512DQ-FAST-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3188 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
3189 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm23, %zmm12
3190 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm25
3191 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm23, %zmm25
3192 ; AVX512DQ-FAST-NEXT: movb $96, %sil
3193 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k1
3194 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm25 {%k1}
3195 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,12,0,5,4,12,0,5]
3196 ; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3197 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm10, %zmm30, %zmm24
3198 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm19
3199 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm12, %zmm19
3200 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,1,12,7,0,1,12,7]
3201 ; AVX512DQ-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3202 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm19
3203 ; AVX512DQ-FAST-NEXT: movb $120, %sil
3204 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm3, %zmm4
3205 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm22 = xmm22[0],mem[0]
3206 ; AVX512DQ-FAST-NEXT: vinserti32x4 $1, %xmm22, %ymm0, %ymm22
3207 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm22, %zmm0, %zmm4 {%k2}
3208 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm24, %zmm4 {%k3}
3209 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [15,7,15,7,15,7,15,7]
3210 ; AVX512DQ-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3211 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
3212 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm24, %zmm22
3213 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
3214 ; AVX512DQ-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3215 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm22, %zmm14, %zmm28
3216 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
3217 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm24, %zmm29
3218 ; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm26 = [15,7,15,7]
3219 ; AVX512DQ-FAST-NEXT: # ymm26 = mem[0,1,0,1]
3220 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
3221 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm26, %zmm22
3222 ; AVX512DQ-FAST-NEXT: movb $24, %dil
3223 ; AVX512DQ-FAST-NEXT: kmovw %edi, %k2
3224 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm22 {%k2}
3225 ; AVX512DQ-FAST-NEXT: movb $-31, %dil
3226 ; AVX512DQ-FAST-NEXT: kmovw %edi, %k3
3227 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm22 {%k3}
3228 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,0,14,6,5,0,14,6]
3229 ; AVX512DQ-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3230 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm11, %zmm15, %zmm28
3231 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,13,6,7,0,13,6,7]
3232 ; AVX512DQ-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
3233 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm14, %zmm28, %zmm29
3234 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm16, %zmm1, %zmm20
3235 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm21
3236 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm21[0,1,2,3],zmm20[4,5,6,7]
3237 ; AVX512DQ-FAST-NEXT: movb $-61, %dil
3238 ; AVX512DQ-FAST-NEXT: kmovw %edi, %k3
3239 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, %zmm20 {%k3}
3240 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
3241 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm19 {%k3}
3242 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [3,0,12,4,3,0,12,4]
3243 ; AVX512DQ-FAST-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
3244 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm21
3245 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm25, %zmm21
3246 ; AVX512DQ-FAST-NEXT: movb $48, %sil
3247 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm28
3248 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm28
3249 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm23
3250 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm23 {%k1}
3251 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm15, %zmm11, %zmm12
3252 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm27, %zmm12
3253 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm12 {%k3}
3254 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
3255 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm3[0],zmm5[0],zmm3[2],zmm5[2],zmm3[4],zmm5[4],zmm3[6],zmm5[6]
3256 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %ymm23
3257 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %ymm27
3258 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %ymm28
3259 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [1,3,7,7]
3260 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm28, %ymm8
3261 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm27, %ymm6, %ymm8
3262 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
3263 ; AVX512DQ-FAST-NEXT: movb $14, %sil
3264 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm16, %zmm25
3265 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm25 {%k3} = zmm13[0],zmm0[0],zmm13[2],zmm0[2],zmm13[4],zmm0[4],zmm13[6],zmm0[6]
3266 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
3267 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm21 {%k3}
3268 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm8
3269 ; AVX512DQ-FAST-NEXT: vpermi2q %ymm23, %ymm8, %ymm6
3270 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
3271 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm25 {%k3}
3272 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm28[0],ymm27[0],ymm28[2],ymm27[2]
3273 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm18[0,1,2,3],zmm17[4,5,6,7]
3274 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [11,3,11,3,11,3,11,3]
3275 ; AVX512DQ-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3276 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
3277 ; AVX512DQ-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3278 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm27, %zmm1
3279 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
3280 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm27, %zmm16
3281 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm27
3282 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm27
3283 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm16 {%k1}
3284 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm18
3285 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm1 {%k1}
3286 ; AVX512DQ-FAST-NEXT: movb $28, %al
3287 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
3288 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k1} = zmm6[2,3,2,3],zmm31[2,3,2,3]
3289 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,0,10,2,1,0,10,2]
3290 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
3291 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm6, %zmm0
3292 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm13
3293 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm13
3294 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm8[0],ymm23[0],ymm8[2],ymm23[2]
3295 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k1} = zmm6[2,3,2,3],zmm14[2,3,2,3]
3296 ; AVX512DQ-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm6
3297 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
3298 ; AVX512DQ-FAST-NEXT: movb $6, %al
3299 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
3300 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm13 {%k1}
3301 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [9,1,9,1,9,1,9,1]
3302 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3303 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm11
3304 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm8
3305 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm6, %zmm8
3306 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [4,9,0,3,4,9,0,3]
3307 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
3308 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm6, %zmm11
3309 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm6, %zmm8
3310 ; AVX512DQ-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm6
3311 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
3312 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
3313 ; AVX512DQ-FAST-NEXT: movb $64, %al
3314 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
3315 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k1}
3316 ; AVX512DQ-FAST-NEXT: movb $56, %al
3317 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
3318 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm13 {%k1}
3319 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
3320 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm24, %zmm7
3321 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm3
3322 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm3 {%k2}
3323 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,15,u,u>
3324 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm30, %zmm3, %zmm5
3325 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,1,2,3,4,5,15,u>
3326 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm10, %zmm5, %zmm3
3327 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <13,u,2,3,4,5,6,14>
3328 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm10, %zmm17, %zmm5
3329 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [14,1,2,3,4,5,6,15]
3330 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm31, %zmm3, %zmm6
3331 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,13,2,3,4,5,6,7]
3332 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm31, %zmm5, %zmm3
3333 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3334 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
3335 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
3336 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, 192(%rax)
3337 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
3338 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, 320(%rax)
3339 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, 384(%rax)
3340 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 448(%rax)
3341 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 512(%rax)
3342 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 576(%rax)
3343 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
3344 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 704(%rax)
3345 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
3346 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 768(%rax)
3347 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 832(%rax)
3348 ; AVX512DQ-FAST-NEXT: vzeroupper
3349 ; AVX512DQ-FAST-NEXT: retq
;
3351 ; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride7_vf16:
3352 ; AVX512BW-ONLY-SLOW: # %bb.0:
3353 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3354 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm4
3355 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
3356 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm5
3357 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm7
3358 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
3359 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm9
3360 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm2
3361 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm3
3362 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm11
3363 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
3364 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3365 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3366 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm12
3367 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm12
3368 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [6,14,6,14]
3369 ; AVX512BW-ONLY-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
3370 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm15
3371 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm16 = [15,7,15,7]
3372 ; AVX512BW-ONLY-SLOW-NEXT: # ymm16 = mem[0,1,2,3,0,1,2,3]
3373 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
3374 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm16, %zmm10
3375 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm16
3376 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13
3377 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm0, %zmm13
3378 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm13[0,1,2,3],zmm12[4,5,6,7]
3379 ; AVX512BW-ONLY-SLOW-NEXT: movb $64, %sil
3380 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k1
3381 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm23 {%k1}
3382 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
3383 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm24
3384 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm25
3385 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm22
3386 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm30
3387 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [0,8,0,1,0,8,0,1]
3388 ; AVX512BW-ONLY-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3389 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [0,8,0,8,0,8,0,8]
3390 ; AVX512BW-ONLY-SLOW-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3391 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
3392 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm20, %zmm21
3393 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm11, %zmm20
3394 ; AVX512BW-ONLY-SLOW-NEXT: movb $96, %sil
3395 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k1
3396 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [4,12,0,5,4,12,0,5]
3397 ; AVX512BW-ONLY-SLOW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
3398 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm18
3399 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [15,7,15,7,15,7,15,7]
3400 ; AVX512BW-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3401 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm26
3402 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm17, %zmm26
3403 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm19
3404 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm17, %zmm19
3405 ; AVX512BW-ONLY-SLOW-NEXT: movb $24, %sil
3406 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
3407 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm3, %zmm9, %zmm17
3408 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm16 {%k2}
3409 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = <u,1,2,3,4,15,u,u>
3410 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm11, %zmm16, %zmm27
3411 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm11
3412 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,1,12,7,0,1,12,7]
3413 ; AVX512BW-ONLY-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
3414 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm29, %zmm11
3415 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm31 = [11,3,11,3,11,3,11,3]
3416 ; AVX512BW-ONLY-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3417 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm15
3418 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [2,10,0,3,2,10,0,3]
3419 ; AVX512BW-ONLY-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
3420 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm16
3421 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm17, %zmm16
3422 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm16 {%k1}
3423 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [9,1,9,1,9,1,9,1]
3424 ; AVX512BW-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3425 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
3426 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3427 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm14, %zmm25, %zmm0
3428 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm25, %zmm14, %zmm13
3429 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm28, %zmm14
3430 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm15
3431 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3432 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm28, %zmm18
3433 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,1,2,3,4,5,15,u>
3434 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm27, %zmm25
3435 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <13,u,2,3,4,5,6,14>
3436 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm24, %zmm23, %zmm1
3437 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %ymm24
3438 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm23 = ymm24[0],ymm15[0],ymm24[2],ymm15[2]
3439 ; AVX512BW-ONLY-SLOW-NEXT: movb $28, %sil
3440 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
3441 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k3} = zmm23[2,3,2,3],zmm30[2,3,2,3]
3442 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
3443 ; AVX512BW-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3444 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm26, %zmm22, %zmm28
3445 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,13,6,7,0,13,6,7]
3446 ; AVX512BW-ONLY-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3447 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm27
3448 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm15
3449 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm29, %zmm13
3450 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %ymm26
3451 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm31
3452 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm17
3453 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, %zmm17 {%k1}
3454 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm23 = ymm26[0],ymm15[0],ymm26[2],ymm15[2]
3455 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm17 {%k3} = zmm23[2,3,2,3],zmm22[2,3,2,3]
3456 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [4,9,0,3,4,9,0,3]
3457 ; AVX512BW-ONLY-SLOW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
3458 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm23, %zmm14
3459 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [13,5,13,5,13,5,13,5]
3460 ; AVX512BW-ONLY-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3461 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm23, %zmm18
3462 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [14,1,2,3,4,5,6,15]
3463 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm30, %zmm25, %zmm22
3464 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,13,2,3,4,5,6,7]
3465 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm30, %zmm1, %zmm23
3466 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm1
3467 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm29, %zmm1
3468 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3469 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm0
3470 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [3,0,12,4,3,0,12,4]
3471 ; AVX512BW-ONLY-SLOW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
3472 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm25, %zmm3
3473 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm30
3474 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm29, %zmm30
3475 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm25, %zmm2
3476 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
3477 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm12, %zmm8
3478 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm12
3479 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm31 = [1,0,10,2,1,0,10,2]
3480 ; AVX512BW-ONLY-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3]
3481 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
3482 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm9
3483 ; AVX512BW-ONLY-SLOW-NEXT: movb $48, %sil
3484 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
3485 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm7[0],zmm6[2],zmm7[2],zmm6[4],zmm7[4],zmm6[6],zmm7[6]
3486 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm29, %zmm6
3487 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
3488 ; AVX512BW-ONLY-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
3489 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm7
3490 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm5, %zmm4, %zmm29
3491 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm31
3492 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm4[0],zmm5[0],zmm4[2],zmm5[2],zmm4[4],zmm5[4],zmm4[6],zmm5[6]
3493 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm4
3494 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
3495 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
3496 ; AVX512BW-ONLY-SLOW-NEXT: movb $12, %sil
3497 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
3498 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm8 {%k3}
3499 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm21, %zmm4
3500 ; AVX512BW-ONLY-SLOW-NEXT: movb $112, %sil
3501 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
3502 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8 {%k4}
3503 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm4
3504 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
3505 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
3506 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm12 {%k3}
3507 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm20, %zmm4
3508 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k4}
3509 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k1}
3510 ; AVX512BW-ONLY-SLOW-NEXT: movb $120, %sil
3511 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
3512 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
3513 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
3514 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
3515 ; AVX512BW-ONLY-SLOW-NEXT: movb $6, %sil
3516 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
3517 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm9 {%k4}
3518 ; AVX512BW-ONLY-SLOW-NEXT: movb $56, %sil
3519 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k5
3520 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm9 {%k5}
3521 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
3522 ; AVX512BW-ONLY-SLOW-NEXT: movb $-31, %sil
3523 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
3524 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, %zmm10 {%k2}
3525 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm7[0,1,2,3],zmm0[4,5,6,7]
3526 ; AVX512BW-ONLY-SLOW-NEXT: movb $-61, %sil
3527 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
3528 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1 {%k2}
3529 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm29 {%k1}
3530 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm13 {%k3}
3531 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm4
3532 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = mem[0,1,2,3],ymm4[4,5,6,7]
3533 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm31 {%k4}
3534 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm31 {%k5}
3535 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm24, %ymm4 # 32-byte Folded Reload
3536 ; AVX512BW-ONLY-SLOW-NEXT: # ymm4 = ymm24[1],mem[1],ymm24[3],mem[3]
3537 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,3,3]
3538 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
3539 ; AVX512BW-ONLY-SLOW-NEXT: movb $14, %cl
3540 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
3541 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm3 {%k1}
3542 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm0 = ymm26[1],ymm15[1],ymm26[3],ymm15[3]
3543 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
3544 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
3545 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
3546 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3547 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 64(%rax)
3548 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 128(%rax)
3549 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 256(%rax)
3550 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 320(%rax)
3551 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 384(%rax)
3552 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 512(%rax)
3553 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 576(%rax)
3554 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 704(%rax)
3555 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 192(%rax)
3556 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 448(%rax)
3557 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 640(%rax)
3558 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
3559 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 768(%rax)
3560 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 832(%rax)
3561 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
3562 ; AVX512BW-ONLY-SLOW-NEXT: retq
;
3564 ; AVX512BW-ONLY-FAST-LABEL: store_i64_stride7_vf16:
3565 ; AVX512BW-ONLY-FAST: # %bb.0:
3566 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3567 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm12
3568 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm4
3569 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm0
3570 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm5
3571 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm1
3572 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
3573 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm17
3574 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
3575 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm30
3576 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [6,14,6,14,6,14,6,14]
3577 ; AVX512BW-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3578 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm18
3579 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm20, %zmm18
3580 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm21 = [6,14,6,14]
3581 ; AVX512BW-ONLY-FAST-NEXT: # ymm21 = mem[0,1,2,3,0,1,2,3]
3582 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm19
3583 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm19
3584 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm11
3585 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm10
3586 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm15
3587 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm14
3588 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm31
3589 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,8,0,1,0,8,0,1]
3590 ; AVX512BW-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
3591 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm2
3592 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm2
3593 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm13
3594 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %xmm16
3595 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm13 = xmm13[0],mem[0]
3596 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
3597 ; AVX512BW-ONLY-FAST-NEXT: movb $12, %sil
3598 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k1
3599 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm13, %zmm0, %zmm2 {%k1}
3600 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm13 = [0,8,0,8,0,8,0,8]
3601 ; AVX512BW-ONLY-FAST-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3602 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
3603 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm13, %zmm22
3604 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm22, %zmm22
3605 ; AVX512BW-ONLY-FAST-NEXT: movb $112, %sil
3606 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
3607 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm2 {%k2}
3608 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm3
3609 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm16 = xmm16[0],mem[0]
3610 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $1, %xmm16, %ymm0, %ymm16
3611 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm16, %zmm0, %zmm3 {%k1}
3612 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm30, %zmm13
3613 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm13, %zmm13
3614 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3 {%k2}
3615 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [13,5,13,5,13,5,13,5]
3616 ; AVX512BW-ONLY-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3617 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm13
3618 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm24, %zmm13
3619 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm26
3620 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm24, %zmm26
3621 ; AVX512BW-ONLY-FAST-NEXT: movb $96, %sil
3622 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k1
3623 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm26 {%k1}
3624 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [4,12,0,5,4,12,0,5]
3625 ; AVX512BW-ONLY-FAST-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
3626 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm13
3627 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm16, %zmm13
3628 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,1,12,7,0,1,12,7]
3629 ; AVX512BW-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3630 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm13
3631 ; AVX512BW-ONLY-FAST-NEXT: movb $120, %sil
3632 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [15,7,15,7,15,7,15,7]
3633 ; AVX512BW-ONLY-FAST-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3634 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
3635 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm23, %zmm22
3636 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
3637 ; AVX512BW-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3638 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm14, %zmm28
3639 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
3640 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm23, %zmm29
3641 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm25 = [15,7,15,7]
3642 ; AVX512BW-ONLY-FAST-NEXT: # ymm25 = mem[0,1,2,3,0,1,2,3]
3643 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm22
3644 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm22
3645 ; AVX512BW-ONLY-FAST-NEXT: movb $24, %dil
3646 ; AVX512BW-ONLY-FAST-NEXT: kmovd %edi, %k2
3647 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm22 {%k2}
3648 ; AVX512BW-ONLY-FAST-NEXT: movb $-31, %dil
3649 ; AVX512BW-ONLY-FAST-NEXT: kmovd %edi, %k3
3650 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm22 {%k3}
3651 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,0,14,6,5,0,14,6]
3652 ; AVX512BW-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3653 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm15, %zmm28
3654 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,13,6,7,0,13,6,7]
3655 ; AVX512BW-ONLY-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
3656 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm28, %zmm29
3657 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm17, %zmm1, %zmm20
3658 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm21
3659 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm21[0,1,2,3],zmm20[4,5,6,7]
3660 ; AVX512BW-ONLY-FAST-NEXT: movb $-61, %dil
3661 ; AVX512BW-ONLY-FAST-NEXT: kmovd %edi, %k3
3662 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, %zmm20 {%k3}
3663 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
3664 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm13 {%k3}
3665 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm26 = [3,0,12,4,3,0,12,4]
3666 ; AVX512BW-ONLY-FAST-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3]
3667 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm21
3668 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm26, %zmm21
3669 ; AVX512BW-ONLY-FAST-NEXT: movb $48, %sil
3670 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm28
3671 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm24, %zmm28
3672 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm24
3673 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm24 {%k1}
3674 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm15, %zmm11, %zmm16
3675 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm27, %zmm16
3676 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm16 {%k3}
3677 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
3678 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm4[0],zmm5[0],zmm4[2],zmm5[2],zmm4[4],zmm5[4],zmm4[6],zmm5[6]
3679 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %ymm24
3680 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %ymm27
3681 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm28
3682 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [1,3,7,7]
3683 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %ymm28, %ymm8
3684 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm27, %ymm6, %ymm8
3685 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
3686 ; AVX512BW-ONLY-FAST-NEXT: movb $14, %sil
3687 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm17, %zmm26
3688 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm26 {%k3} = zmm12[0],zmm0[0],zmm12[2],zmm0[2],zmm12[4],zmm0[4],zmm12[6],zmm0[6]
3689 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
3690 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm21 {%k3}
3691 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm8
3692 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %ymm24, %ymm8, %ymm6
3693 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
3694 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm26 {%k3}
3695 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm28[0],ymm27[0],ymm28[2],ymm27[2]
3696 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm18 = zmm19[0,1,2,3],zmm18[4,5,6,7]
3697 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm19 = [11,3,11,3,11,3,11,3]
3698 ; AVX512BW-ONLY-FAST-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3699 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
3700 ; AVX512BW-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3701 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm27, %zmm1
3702 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm17
3703 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm27, %zmm17
3704 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm27
3705 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm27
3706 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm17 {%k1}
3707 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm19
3708 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1 {%k1}
3709 ; AVX512BW-ONLY-FAST-NEXT: movb $28, %al
3710 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
3711 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 {%k1} = zmm6[2,3,2,3],zmm31[2,3,2,3]
3712 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,0,10,2,1,0,10,2]
3713 ; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
3714 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm12, %zmm6, %zmm0
3715 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm12
3716 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm6, %zmm12
3717 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm8[0],ymm24[0],ymm8[2],ymm24[2]
3718 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k1} = zmm6[2,3,2,3],zmm14[2,3,2,3]
3719 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm6
3720 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
3721 ; AVX512BW-ONLY-FAST-NEXT: movb $6, %al
3722 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
3723 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm12 {%k1}
3724 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [9,1,9,1,9,1,9,1]
3725 ; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3726 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm11
3727 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm8
3728 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm6, %zmm8
3729 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [4,9,0,3,4,9,0,3]
3730 ; AVX512BW-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
3731 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm6, %zmm11
3732 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm6, %zmm8
3733 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm6
3734 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
3735 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
3736 ; AVX512BW-ONLY-FAST-NEXT: movb $64, %al
3737 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
3738 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm18 {%k1}
3739 ; AVX512BW-ONLY-FAST-NEXT: movb $56, %al
3740 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
3741 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm12 {%k1}
3742 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
3743 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm23, %zmm7
3744 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm25, %zmm4
3745 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4 {%k2}
3746 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,15,u,u>
3747 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm30, %zmm4, %zmm5
3748 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,1,2,3,4,5,15,u>
3749 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm5, %zmm4
3750 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <13,u,2,3,4,5,6,14>
3751 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm10, %zmm18, %zmm5
3752 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [14,1,2,3,4,5,6,15]
3753 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm31, %zmm4, %zmm6
3754 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,13,2,3,4,5,6,7]
3755 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm31, %zmm5, %zmm4
3756 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3757 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
3758 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
3759 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 192(%rax)
3760 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 256(%rax)
3761 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 320(%rax)
3762 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 384(%rax)
3763 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 512(%rax)
3764 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 576(%rax)
3765 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
3766 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 704(%rax)
3767 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 448(%rax)
3768 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
3769 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 768(%rax)
3770 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 832(%rax)
3771 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
3772 ; AVX512BW-ONLY-FAST-NEXT: retq
3773 ;
3774 ; AVX512DQBW-SLOW-LABEL: store_i64_stride7_vf16:
3775 ; AVX512DQBW-SLOW: # %bb.0:
3776 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3777 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm4
3778 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm6
3779 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm13
3780 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm7
3781 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm8
3782 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm9
3783 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm2
3784 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm3
3785 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm11
3786 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
3787 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3788 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3789 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm12
3790 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm12
3791 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
3792 ; AVX512DQBW-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
3793 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
3794 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm16 = [15,7,15,7]
3795 ; AVX512DQBW-SLOW-NEXT: # ymm16 = mem[0,1,0,1]
3796 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
3797 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm16, %zmm10
3798 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm16
3799 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm14
3800 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm14
3801 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm14[0,1,2,3],zmm12[4,5,6,7]
3802 ; AVX512DQBW-SLOW-NEXT: movb $64, %sil
3803 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k1
3804 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm23 {%k1}
3805 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
3806 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm24
3807 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm25
3808 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rax), %zmm22
3809 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rax), %zmm28
3810 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [0,8,0,8,0,8,0,8]
3811 ; AVX512DQBW-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3812 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
3813 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm17, %zmm21
3814 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [13,5,13,5,13,5,13,5]
3815 ; AVX512DQBW-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3816 ; AVX512DQBW-SLOW-NEXT: movb $96, %sil
3817 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k1
3818 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,12,0,5,4,12,0,5]
3819 ; AVX512DQBW-SLOW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
3820 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm20
3821 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm24, %zmm11, %zmm17
3822 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [15,7,15,7,15,7,15,7]
3823 ; AVX512DQBW-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3824 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm26
3825 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm18, %zmm26
3826 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm19
3827 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm18, %zmm19
3828 ; AVX512DQBW-SLOW-NEXT: movb $24, %sil
3829 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
3830 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm9, %zmm18
3831 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm16 {%k2}
3832 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = <u,1,2,3,4,15,u,u>
3833 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm16, %zmm27
3834 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm12, %zmm11
3835 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm30 = [0,1,12,7,0,1,12,7]
3836 ; AVX512DQBW-SLOW-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3]
3837 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm30, %zmm11
3838 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm31 = [11,3,11,3,11,3,11,3]
3839 ; AVX512DQBW-SLOW-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3840 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm31, %zmm0
3841 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [2,10,0,3,2,10,0,3]
3842 ; AVX512DQBW-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
3843 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm16
3844 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm18, %zmm16
3845 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k1}
3846 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm29 = [9,1,9,1,9,1,9,1]
3847 ; AVX512DQBW-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3848 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
3849 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
3850 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm14, %zmm25, %zmm0
3851 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm25, %zmm14, %zmm12
3852 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm29, %zmm14
3853 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%r9), %ymm5
3854 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3855 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm29, %zmm20
3856 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,1,2,3,4,5,15,u>
3857 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm24, %zmm27, %zmm25
3858 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <13,u,2,3,4,5,6,14>
3859 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm24, %zmm23, %zmm1
3860 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %ymm23
3861 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm24 = ymm23[0],ymm5[0],ymm23[2],ymm5[2]
3862 ; AVX512DQBW-SLOW-NEXT: movb $28, %sil
3863 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
3864 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k3} = zmm24[2,3,2,3],zmm28[2,3,2,3]
3865 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [6,13,14,7,6,13,14,7]
3866 ; AVX512DQBW-SLOW-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
3867 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm26, %zmm22, %zmm29
3868 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,13,6,7,0,13,6,7]
3869 ; AVX512DQBW-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
3870 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm27
3871 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %ymm5
3872 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm30, %zmm12
3873 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %ymm26
3874 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm31
3875 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm18
3876 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm18 {%k1}
3877 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm24 = ymm26[0],ymm5[0],ymm26[2],ymm5[2]
3878 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm18 {%k3} = zmm24[2,3,2,3],zmm22[2,3,2,3]
3879 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [4,9,0,3,4,9,0,3]
3880 ; AVX512DQBW-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
3881 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm24, %zmm14
3882 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
3883 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm15, %zmm0
3884 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm24, %zmm20
3885 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [14,1,2,3,4,5,6,15]
3886 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm28, %zmm25, %zmm22
3887 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm24 = [0,13,2,3,4,5,6,7]
3888 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm28, %zmm1, %zmm24
3889 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [1,0,10,2,1,0,10,2]
3890 ; AVX512DQBW-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
3891 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
3892 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm25
3893 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [3,0,12,4,3,0,12,4]
3894 ; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
3895 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm1, %zmm3
3896 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm31
3897 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm31
3898 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm1, %zmm2
3899 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm9
3900 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm28, %zmm9
3901 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm30 = [0,8,0,1,0,8,0,1]
3902 ; AVX512DQBW-SLOW-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3]
3903 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8
3904 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm30, %zmm8
3905 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm6, %zmm30
3906 ; AVX512DQBW-SLOW-NEXT: movb $48, %sil
3907 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
3908 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm7[0],zmm6[2],zmm7[2],zmm6[4],zmm7[4],zmm6[6],zmm7[6]
3909 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm6
3910 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [6,14,6,14]
3911 ; AVX512DQBW-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
3912 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm7
3913 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm13, %zmm4, %zmm15
3914 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm13, %zmm28
3915 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm4[0],zmm13[0],zmm4[2],zmm13[2],zmm4[4],zmm13[4],zmm4[6],zmm13[6]
3916 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm1
3917 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
3918 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3919 ; AVX512DQBW-SLOW-NEXT: movb $12, %sil
3920 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k5
3921 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k5}
3922 ; AVX512DQBW-SLOW-NEXT: movb $112, %sil
3923 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k7
3924 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm21, %zmm8 {%k7}
3925 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
3926 ; AVX512DQBW-SLOW-NEXT: movb $120, %sil
3927 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
3928 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
3929 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
3930 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
3931 ; AVX512DQBW-SLOW-NEXT: movb $6, %sil
3932 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
3933 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm9 {%k4}
3934 ; AVX512DQBW-SLOW-NEXT: movb $56, %sil
3935 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k6
3936 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm9 {%k6}
3937 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
3938 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
3939 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3940 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k5}
3941 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm17, %zmm30 {%k7}
3942 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
3943 ; AVX512DQBW-SLOW-NEXT: movb $-31, %sil
3944 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
3945 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm10 {%k2}
3946 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm7[0,1,2,3],zmm25[4,5,6,7]
3947 ; AVX512DQBW-SLOW-NEXT: movb $-61, %sil
3948 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
3949 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0 {%k2}
3950 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm15 {%k1}
3951 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, %zmm12 {%k3}
3952 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
3953 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
3954 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm28 {%k4}
3955 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm28 {%k6}
3956 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm23, %ymm1 # 32-byte Folded Reload
3957 ; AVX512DQBW-SLOW-NEXT: # ymm1 = ymm23[1],mem[1],ymm23[3],mem[3]
3958 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,3,3]
3959 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
3960 ; AVX512DQBW-SLOW-NEXT: movb $14, %cl
3961 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
3962 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k1}
3963 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{.*#+}} ymm1 = ymm26[1],ymm5[1],ymm26[3],ymm5[3]
3964 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,3,3]
3965 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
3966 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k1}
3967 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3968 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, 64(%rax)
3969 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, 128(%rax)
3970 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 192(%rax)
3971 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 256(%rax)
3972 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 320(%rax)
3973 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 384(%rax)
3974 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, 448(%rax)
3975 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 512(%rax)
3976 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 576(%rax)
3977 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 640(%rax)
3978 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 704(%rax)
3979 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
3980 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, 768(%rax)
3981 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, 832(%rax)
3982 ; AVX512DQBW-SLOW-NEXT: vzeroupper
3983 ; AVX512DQBW-SLOW-NEXT: retq
3984 ;
3985 ; AVX512DQBW-FAST-LABEL: store_i64_stride7_vf16:
3986 ; AVX512DQBW-FAST: # %bb.0:
3987 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3988 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm13
3989 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm3
3990 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm0
3991 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm5
3992 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm1
3993 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
3994 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm16
3995 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
3996 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm30
3997 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [6,14,6,14,6,14,6,14]
3998 ; AVX512DQBW-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
3999 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm17
4000 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm20, %zmm17
4001 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm21 = [6,14,6,14]
4002 ; AVX512DQBW-FAST-NEXT: # ymm21 = mem[0,1,0,1]
4003 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm18
4004 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm18
4005 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm11
4006 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm10
4007 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm15
4008 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rax), %zmm14
4009 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rax), %zmm31
4010 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,8,0,1,0,8,0,1]
4011 ; AVX512DQBW-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
4012 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
4013 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm2
4014 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm12
4015 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %xmm22
4016 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm12 = xmm12[0],mem[0]
4017 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
4018 ; AVX512DQBW-FAST-NEXT: movb $12, %sil
4019 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
4020 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm2 {%k2}
4021 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [0,8,0,8,0,8,0,8]
4022 ; AVX512DQBW-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4023 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm12
4024 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm24, %zmm12
4025 ; AVX512DQBW-FAST-NEXT: movb $112, %sil
4026 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
4027 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm12, %zmm2 {%k3}
4028 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm23 = [13,5,13,5,13,5,13,5]
4029 ; AVX512DQBW-FAST-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4030 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm12
4031 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm23, %zmm12
4032 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm25
4033 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm23, %zmm25
4034 ; AVX512DQBW-FAST-NEXT: movb $96, %sil
4035 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k1
4036 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm25 {%k1}
4037 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [4,12,0,5,4,12,0,5]
4038 ; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
4039 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm10, %zmm30, %zmm24
4040 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm19
4041 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm12, %zmm19
4042 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [0,1,12,7,0,1,12,7]
4043 ; AVX512DQBW-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
4044 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm19
4045 ; AVX512DQBW-FAST-NEXT: movb $120, %sil
4046 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm3, %zmm4
4047 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm22 = xmm22[0],mem[0]
4048 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $1, %xmm22, %ymm0, %ymm22
4049 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm22, %zmm0, %zmm4 {%k2}
4050 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm24, %zmm4 {%k3}
4051 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [15,7,15,7,15,7,15,7]
4052 ; AVX512DQBW-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4053 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm22
4054 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm24, %zmm22
4055 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [6,13,14,7,6,13,14,7]
4056 ; AVX512DQBW-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
4057 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm22, %zmm14, %zmm28
4058 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm29
4059 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm24, %zmm29
4060 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm26 = [15,7,15,7]
4061 ; AVX512DQBW-FAST-NEXT: # ymm26 = mem[0,1,0,1]
4062 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
4063 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm26, %zmm22
4064 ; AVX512DQBW-FAST-NEXT: movb $24, %dil
4065 ; AVX512DQBW-FAST-NEXT: kmovd %edi, %k2
4066 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm22 {%k2}
4067 ; AVX512DQBW-FAST-NEXT: movb $-31, %dil
4068 ; AVX512DQBW-FAST-NEXT: kmovd %edi, %k3
4069 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm22 {%k3}
4070 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [5,0,14,6,5,0,14,6]
4071 ; AVX512DQBW-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
4072 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm11, %zmm15, %zmm28
4073 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,13,6,7,0,13,6,7]
4074 ; AVX512DQBW-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
4075 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm14, %zmm28, %zmm29
4076 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm16, %zmm1, %zmm20
4077 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm21
4078 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm21[0,1,2,3],zmm20[4,5,6,7]
4079 ; AVX512DQBW-FAST-NEXT: movb $-61, %dil
4080 ; AVX512DQBW-FAST-NEXT: kmovd %edi, %k3
4081 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, %zmm20 {%k3}
4082 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
4083 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm19 {%k3}
4084 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [3,0,12,4,3,0,12,4]
4085 ; AVX512DQBW-FAST-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
4086 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm21
4087 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm25, %zmm21
4088 ; AVX512DQBW-FAST-NEXT: movb $48, %sil
4089 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm28
4090 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm23, %zmm28
4091 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm23
4092 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm23 {%k1}
4093 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm15, %zmm11, %zmm12
4094 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm27, %zmm12
4095 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm12 {%k3}
4096 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
4097 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm3[0],zmm5[0],zmm3[2],zmm5[2],zmm3[4],zmm5[4],zmm3[6],zmm5[6]
4098 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %ymm23
4099 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %ymm27
4100 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %ymm28
4101 ; AVX512DQBW-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [1,3,7,7]
4102 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %ymm28, %ymm8
4103 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm27, %ymm6, %ymm8
4104 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
4105 ; AVX512DQBW-FAST-NEXT: movb $14, %sil
4106 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm16, %zmm25
4107 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm25 {%k3} = zmm13[0],zmm0[0],zmm13[2],zmm0[2],zmm13[4],zmm0[4],zmm13[6],zmm0[6]
4108 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
4109 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm21 {%k3}
4110 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r8), %ymm8
4111 ; AVX512DQBW-FAST-NEXT: vpermi2q %ymm23, %ymm8, %ymm6
4112 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],mem[6,7]
4113 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm25 {%k3}
4114 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm28[0],ymm27[0],ymm28[2],ymm27[2]
4115 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm18[0,1,2,3],zmm17[4,5,6,7]
4116 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm18 = [11,3,11,3,11,3,11,3]
4117 ; AVX512DQBW-FAST-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4118 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
4119 ; AVX512DQBW-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
4120 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm27, %zmm1
4121 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm16
4122 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm27, %zmm16
4123 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm27
4124 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm27
4125 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm16 {%k1}
4126 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm13, %zmm18
4127 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm1 {%k1}
4128 ; AVX512DQBW-FAST-NEXT: movb $28, %al
4129 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
4130 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k1} = zmm6[2,3,2,3],zmm31[2,3,2,3]
4131 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [1,0,10,2,1,0,10,2]
4132 ; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4133 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm6, %zmm0
4134 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm13
4135 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm13
4136 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm8[0],ymm23[0],ymm8[2],ymm23[2]
4137 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k1} = zmm6[2,3,2,3],zmm14[2,3,2,3]
4138 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm6
4139 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
4140 ; AVX512DQBW-FAST-NEXT: movb $6, %al
4141 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
4142 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm13 {%k1}
4143 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm6 = [9,1,9,1,9,1,9,1]
4144 ; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
4145 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm11
4146 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm8
4147 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm6, %zmm8
4148 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [4,9,0,3,4,9,0,3]
4149 ; AVX512DQBW-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4150 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm6, %zmm11
4151 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm6, %zmm8
4152 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm6
4153 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = mem[0,1,2,3],ymm6[4,5,6,7]
4154 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm0 {%k1}
4155 ; AVX512DQBW-FAST-NEXT: movb $64, %al
4156 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
4157 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm30, %zmm17 {%k1}
4158 ; AVX512DQBW-FAST-NEXT: movb $56, %al
4159 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
4160 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm13 {%k1}
4161 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm0 {%k1}
4162 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm24, %zmm7
4163 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm3
4164 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm3 {%k2}
4165 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,15,u,u>
4166 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm30, %zmm3, %zmm5
4167 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,1,2,3,4,5,15,u>
4168 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm10, %zmm5, %zmm3
4169 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <13,u,2,3,4,5,6,14>
4170 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm10, %zmm17, %zmm5
4171 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [14,1,2,3,4,5,6,15]
4172 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm31, %zmm3, %zmm6
4173 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,13,2,3,4,5,6,7]
4174 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm31, %zmm5, %zmm3
4175 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4176 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
4177 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
4178 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, 192(%rax)
4179 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, 256(%rax)
4180 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, 320(%rax)
4181 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, 384(%rax)
4182 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 448(%rax)
4183 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 512(%rax)
4184 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, 576(%rax)
4185 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
4186 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 704(%rax)
4187 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
4188 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, 768(%rax)
4189 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 832(%rax)
4190 ; AVX512DQBW-FAST-NEXT: vzeroupper
4191 ; AVX512DQBW-FAST-NEXT: retq
4192 %in.vec0 = load <16 x i64>, ptr %in.vecptr0, align 64
4193 %in.vec1 = load <16 x i64>, ptr %in.vecptr1, align 64
4194 %in.vec2 = load <16 x i64>, ptr %in.vecptr2, align 64
4195 %in.vec3 = load <16 x i64>, ptr %in.vecptr3, align 64
4196 %in.vec4 = load <16 x i64>, ptr %in.vecptr4, align 64
4197 %in.vec5 = load <16 x i64>, ptr %in.vecptr5, align 64
4198 %in.vec6 = load <16 x i64>, ptr %in.vecptr6, align 64
4199 %1 = shufflevector <16 x i64> %in.vec0, <16 x i64> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
4200 %2 = shufflevector <16 x i64> %in.vec2, <16 x i64> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
4201 %3 = shufflevector <16 x i64> %in.vec4, <16 x i64> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
4202 %4 = shufflevector <32 x i64> %1, <32 x i64> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4203 %5 = shufflevector <16 x i64> %in.vec6, <16 x i64> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
4204 %6 = shufflevector <32 x i64> %3, <32 x i64> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
4205 %7 = shufflevector <48 x i64> %6, <48 x i64> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
4206 %8 = shufflevector <64 x i64> %4, <64 x i64> %7, <112 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111>
4207 %interleaved.vec = shufflevector <112 x i64> %8, <112 x i64> poison, <112 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111>
4208 store <112 x i64> %interleaved.vec, ptr %out.vec, align 64
4209 ret void
4210 }
4212 define void @store_i64_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
4213 ; SSE-LABEL: store_i64_stride7_vf32:
4214 ; SSE:       # %bb.0:
4215 ; SSE-NEXT: subq $1432, %rsp # imm = 0x598
4216 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4217 ; SSE-NEXT: movapd (%rdi), %xmm2
4218 ; SSE-NEXT: movapd 16(%rdi), %xmm3
4219 ; SSE-NEXT: movapd 32(%rdi), %xmm4
4220 ; SSE-NEXT: movapd (%rsi), %xmm5
4221 ; SSE-NEXT: movapd 16(%rsi), %xmm6
4222 ; SSE-NEXT: movapd (%rdx), %xmm7
4223 ; SSE-NEXT: movapd 16(%rdx), %xmm8
4224 ; SSE-NEXT: movapd (%rcx), %xmm9
4225 ; SSE-NEXT: movapd 16(%rcx), %xmm10
4226 ; SSE-NEXT: movapd (%r8), %xmm11
4227 ; SSE-NEXT: movapd 16(%r8), %xmm12
4228 ; SSE-NEXT: movapd (%r9), %xmm13
4229 ; SSE-NEXT: movapd 16(%r9), %xmm14
4230 ; SSE-NEXT: movapd (%rax), %xmm0
4231 ; SSE-NEXT: movapd 16(%rax), %xmm1
4232 ; SSE-NEXT: movapd %xmm2, %xmm15
4233 ; SSE-NEXT: unpcklpd {{.*#+}} xmm15 = xmm15[0],xmm5[0]
4234 ; SSE-NEXT: movapd %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4235 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
4236 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4237 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm7[1]
4238 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4239 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm9[0]
4240 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4241 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm11[1]
4242 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4243 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm13[0]
4244 ; SSE-NEXT: movapd %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4245 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
4246 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4247 ; SSE-NEXT: movapd %xmm3, %xmm0
4248 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm6[0]
4249 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4250 ; SSE-NEXT: movsd {{.*#+}} xmm3 = xmm1[0],xmm3[1]
4251 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4252 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm8[1]
4253 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4254 ; SSE-NEXT: unpcklpd {{.*#+}} xmm8 = xmm8[0],xmm10[0]
4255 ; SSE-NEXT: movapd %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4256 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
4257 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4258 ; SSE-NEXT: unpcklpd {{.*#+}} xmm12 = xmm12[0],xmm14[0]
4259 ; SSE-NEXT: movapd %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4260 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm1[1]
4261 ; SSE-NEXT: movapd %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4262 ; SSE-NEXT: movapd 32(%rsi), %xmm1
4263 ; SSE-NEXT: movapd %xmm4, %xmm0
4264 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4265 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4266 ; SSE-NEXT: movapd 32(%rax), %xmm0
4267 ; SSE-NEXT: movsd {{.*#+}} xmm4 = xmm0[0],xmm4[1]
4268 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4269 ; SSE-NEXT: movapd 32(%rdx), %xmm2
4270 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
4271 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4272 ; SSE-NEXT: movapd 32(%rcx), %xmm3
4273 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
4274 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4275 ; SSE-NEXT: movapd 32(%r8), %xmm1
4276 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
4277 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4278 ; SSE-NEXT: movapd 32(%r9), %xmm2
4279 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4280 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4281 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4282 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4283 ; SSE-NEXT: movapd 48(%rdi), %xmm1
4284 ; SSE-NEXT: movapd 48(%rsi), %xmm2
4285 ; SSE-NEXT: movapd %xmm1, %xmm0
4286 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4287 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4288 ; SSE-NEXT: movapd 48(%rax), %xmm0
4289 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4290 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4291 ; SSE-NEXT: movapd 48(%rdx), %xmm1
4292 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4293 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4294 ; SSE-NEXT: movapd 48(%rcx), %xmm2
4295 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4296 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4297 ; SSE-NEXT: movapd 48(%r8), %xmm1
4298 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4299 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4300 ; SSE-NEXT: movapd 48(%r9), %xmm2
4301 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4302 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4303 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4304 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4305 ; SSE-NEXT: movapd 64(%rdi), %xmm1
4306 ; SSE-NEXT: movapd 64(%rsi), %xmm2
4307 ; SSE-NEXT: movapd %xmm1, %xmm0
4308 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4309 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4310 ; SSE-NEXT: movapd 64(%rax), %xmm0
4311 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4312 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4313 ; SSE-NEXT: movapd 64(%rdx), %xmm1
4314 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4315 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4316 ; SSE-NEXT: movapd 64(%rcx), %xmm2
4317 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4318 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4319 ; SSE-NEXT: movapd 64(%r8), %xmm1
4320 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4321 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4322 ; SSE-NEXT: movapd 64(%r9), %xmm2
4323 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4324 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4325 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4326 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4327 ; SSE-NEXT: movapd 80(%rdi), %xmm1
4328 ; SSE-NEXT: movapd 80(%rsi), %xmm2
4329 ; SSE-NEXT: movapd %xmm1, %xmm0
4330 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4331 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4332 ; SSE-NEXT: movapd 80(%rax), %xmm0
4333 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4334 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4335 ; SSE-NEXT: movapd 80(%rdx), %xmm1
4336 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4337 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4338 ; SSE-NEXT: movapd 80(%rcx), %xmm2
4339 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4340 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4341 ; SSE-NEXT: movapd 80(%r8), %xmm1
4342 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4343 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4344 ; SSE-NEXT: movapd 80(%r9), %xmm2
4345 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4346 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4347 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4348 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4349 ; SSE-NEXT: movapd 96(%rdi), %xmm1
4350 ; SSE-NEXT: movapd 96(%rsi), %xmm2
4351 ; SSE-NEXT: movapd %xmm1, %xmm0
4352 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4353 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4354 ; SSE-NEXT: movapd 96(%rax), %xmm0
4355 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4356 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4357 ; SSE-NEXT: movapd 96(%rdx), %xmm1
4358 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4359 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4360 ; SSE-NEXT: movapd 96(%rcx), %xmm2
4361 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4362 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4363 ; SSE-NEXT: movapd 96(%r8), %xmm1
4364 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4365 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4366 ; SSE-NEXT: movapd 96(%r9), %xmm2
4367 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4368 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4369 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4370 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4371 ; SSE-NEXT: movapd 112(%rdi), %xmm1
4372 ; SSE-NEXT: movapd 112(%rsi), %xmm2
4373 ; SSE-NEXT: movapd %xmm1, %xmm0
4374 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4375 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4376 ; SSE-NEXT: movapd 112(%rax), %xmm0
4377 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4378 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4379 ; SSE-NEXT: movapd 112(%rdx), %xmm1
4380 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4381 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4382 ; SSE-NEXT: movapd 112(%rcx), %xmm2
4383 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4384 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4385 ; SSE-NEXT: movapd 112(%r8), %xmm1
4386 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4387 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4388 ; SSE-NEXT: movapd 112(%r9), %xmm2
4389 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4390 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4391 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4392 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4393 ; SSE-NEXT: movapd 128(%rdi), %xmm1
4394 ; SSE-NEXT: movapd 128(%rsi), %xmm2
4395 ; SSE-NEXT: movapd %xmm1, %xmm0
4396 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4397 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4398 ; SSE-NEXT: movapd 128(%rax), %xmm0
4399 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4400 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4401 ; SSE-NEXT: movapd 128(%rdx), %xmm1
4402 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4403 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4404 ; SSE-NEXT: movapd 128(%rcx), %xmm2
4405 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4406 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4407 ; SSE-NEXT: movapd 128(%r8), %xmm1
4408 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4409 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4410 ; SSE-NEXT: movapd 128(%r9), %xmm2
4411 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4412 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4413 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4414 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4415 ; SSE-NEXT: movapd 144(%rdi), %xmm1
4416 ; SSE-NEXT: movapd 144(%rsi), %xmm2
4417 ; SSE-NEXT: movapd %xmm1, %xmm0
4418 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4419 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4420 ; SSE-NEXT: movapd 144(%rax), %xmm0
4421 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4422 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4423 ; SSE-NEXT: movapd 144(%rdx), %xmm1
4424 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4425 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4426 ; SSE-NEXT: movapd 144(%rcx), %xmm2
4427 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4428 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4429 ; SSE-NEXT: movapd 144(%r8), %xmm1
4430 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4431 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4432 ; SSE-NEXT: movapd 144(%r9), %xmm2
4433 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4434 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4435 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4436 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4437 ; SSE-NEXT: movapd 160(%rdi), %xmm1
4438 ; SSE-NEXT: movapd 160(%rsi), %xmm2
4439 ; SSE-NEXT: movapd %xmm1, %xmm0
4440 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4441 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4442 ; SSE-NEXT: movapd 160(%rax), %xmm0
4443 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4444 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4445 ; SSE-NEXT: movapd 160(%rdx), %xmm1
4446 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4447 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4448 ; SSE-NEXT: movapd 160(%rcx), %xmm2
4449 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4450 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4451 ; SSE-NEXT: movapd 160(%r8), %xmm1
4452 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4453 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4454 ; SSE-NEXT: movapd 160(%r9), %xmm2
4455 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4456 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4457 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4458 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4459 ; SSE-NEXT: movapd 176(%rdi), %xmm1
4460 ; SSE-NEXT: movapd 176(%rsi), %xmm2
4461 ; SSE-NEXT: movapd %xmm1, %xmm0
4462 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4463 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4464 ; SSE-NEXT: movapd 176(%rax), %xmm0
4465 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4466 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4467 ; SSE-NEXT: movapd 176(%rdx), %xmm1
4468 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4469 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4470 ; SSE-NEXT: movapd 176(%rcx), %xmm2
4471 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4472 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4473 ; SSE-NEXT: movapd 176(%r8), %xmm1
4474 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4475 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4476 ; SSE-NEXT: movapd 176(%r9), %xmm2
4477 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4478 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4479 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4480 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4481 ; SSE-NEXT: movapd 192(%rdi), %xmm1
4482 ; SSE-NEXT: movapd 192(%rsi), %xmm2
4483 ; SSE-NEXT: movapd %xmm1, %xmm0
4484 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4485 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4486 ; SSE-NEXT: movapd 192(%rax), %xmm0
4487 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4488 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4489 ; SSE-NEXT: movapd 192(%rdx), %xmm1
4490 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4491 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4492 ; SSE-NEXT: movapd 192(%rcx), %xmm2
4493 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4494 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4495 ; SSE-NEXT: movapd 192(%r8), %xmm1
4496 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4497 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4498 ; SSE-NEXT: movapd 192(%r9), %xmm2
4499 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4500 ; SSE-NEXT: movapd %xmm1, (%rsp) # 16-byte Spill
4501 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
4502 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4503 ; SSE-NEXT: movapd 208(%rdi), %xmm1
4504 ; SSE-NEXT: movapd 208(%rsi), %xmm2
4505 ; SSE-NEXT: movapd %xmm1, %xmm0
4506 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4507 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4508 ; SSE-NEXT: movapd 208(%rax), %xmm0
4509 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4510 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4511 ; SSE-NEXT: movapd 208(%rdx), %xmm1
4512 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4513 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4514 ; SSE-NEXT: movapd 208(%rcx), %xmm14
4515 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm14[0]
4516 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4517 ; SSE-NEXT: movapd 208(%r8), %xmm1
4518 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm1[1]
4519 ; SSE-NEXT: movapd 208(%r9), %xmm13
4520 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm13[0]
4521 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4522 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
4523 ; SSE-NEXT: movapd 224(%rdi), %xmm15
4524 ; SSE-NEXT: movapd 224(%rsi), %xmm12
4525 ; SSE-NEXT: movapd %xmm15, %xmm0
4526 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm12[0]
4527 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4528 ; SSE-NEXT: movapd 224(%rax), %xmm3
4529 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm3[0],xmm15[1]
4530 ; SSE-NEXT: movapd 224(%rdx), %xmm11
4531 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm11[1]
4532 ; SSE-NEXT: movapd 224(%rcx), %xmm8
4533 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm8[0]
4534 ; SSE-NEXT: movapd 224(%r8), %xmm9
4535 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
4536 ; SSE-NEXT: movapd 224(%r9), %xmm6
4537 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm6[0]
4538 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm3[1]
4539 ; SSE-NEXT: movapd 240(%rdi), %xmm5
4540 ; SSE-NEXT: movapd 240(%rsi), %xmm4
4541 ; SSE-NEXT: movapd %xmm5, %xmm7
4542 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm4[0]
4543 ; SSE-NEXT: movapd 240(%rax), %xmm10
4544 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm10[0],xmm5[1]
4545 ; SSE-NEXT: movapd 240(%rdx), %xmm3
4546 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4547 ; SSE-NEXT: movapd 240(%rcx), %xmm2
4548 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm2[0]
4549 ; SSE-NEXT: movapd 240(%r8), %xmm1
4550 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
4551 ; SSE-NEXT: movapd 240(%r9), %xmm0
4552 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
4553 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
4554 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4555 ; SSE-NEXT: movapd %xmm0, 1776(%rax)
4556 ; SSE-NEXT: movapd %xmm2, 1760(%rax)
4557 ; SSE-NEXT: movapd %xmm4, 1744(%rax)
4558 ; SSE-NEXT: movapd %xmm5, 1728(%rax)
4559 ; SSE-NEXT: movapd %xmm1, 1712(%rax)
4560 ; SSE-NEXT: movapd %xmm3, 1696(%rax)
4561 ; SSE-NEXT: movapd %xmm7, 1680(%rax)
4562 ; SSE-NEXT: movapd %xmm6, 1664(%rax)
4563 ; SSE-NEXT: movapd %xmm8, 1648(%rax)
4564 ; SSE-NEXT: movapd %xmm12, 1632(%rax)
4565 ; SSE-NEXT: movapd %xmm15, 1616(%rax)
4566 ; SSE-NEXT: movapd %xmm9, 1600(%rax)
4567 ; SSE-NEXT: movapd %xmm11, 1584(%rax)
4568 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4569 ; SSE-NEXT: movaps %xmm0, 1568(%rax)
4570 ; SSE-NEXT: movapd %xmm13, 1552(%rax)
4571 ; SSE-NEXT: movapd %xmm14, 1536(%rax)
4572 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4573 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
4574 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4575 ; SSE-NEXT: movaps %xmm0, 1504(%rax)
4576 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4577 ; SSE-NEXT: movaps %xmm0, 1488(%rax)
4578 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4579 ; SSE-NEXT: movaps %xmm0, 1472(%rax)
4580 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4581 ; SSE-NEXT: movaps %xmm0, 1456(%rax)
4582 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4583 ; SSE-NEXT: movaps %xmm0, 1440(%rax)
4584 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4585 ; SSE-NEXT: movaps %xmm0, 1424(%rax)
4586 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4587 ; SSE-NEXT: movaps %xmm0, 1408(%rax)
4588 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4589 ; SSE-NEXT: movaps %xmm0, 1392(%rax)
4590 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4591 ; SSE-NEXT: movaps %xmm0, 1376(%rax)
4592 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4593 ; SSE-NEXT: movaps %xmm0, 1360(%rax)
4594 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4595 ; SSE-NEXT: movaps %xmm0, 1344(%rax)
4596 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4597 ; SSE-NEXT: movaps %xmm0, 1328(%rax)
4598 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4599 ; SSE-NEXT: movaps %xmm0, 1312(%rax)
4600 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4601 ; SSE-NEXT: movaps %xmm0, 1296(%rax)
4602 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4603 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
4604 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4605 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
4606 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4607 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
4608 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4609 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
4610 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4611 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
4612 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4613 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
4614 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4615 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
4616 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4617 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
4618 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4619 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
4620 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4621 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
4622 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4623 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
4624 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4625 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
4626 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4627 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
4628 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4629 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
4630 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4631 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
4632 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4633 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
4634 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4635 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
4636 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4637 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
4638 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4639 ; SSE-NEXT: movaps %xmm0, 992(%rax)
4640 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4641 ; SSE-NEXT: movaps %xmm0, 976(%rax)
4642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4643 ; SSE-NEXT: movaps %xmm0, 960(%rax)
4644 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4645 ; SSE-NEXT: movaps %xmm0, 944(%rax)
4646 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4647 ; SSE-NEXT: movaps %xmm0, 928(%rax)
4648 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4649 ; SSE-NEXT: movaps %xmm0, 912(%rax)
4650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4651 ; SSE-NEXT: movaps %xmm0, 896(%rax)
4652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4653 ; SSE-NEXT: movaps %xmm0, 880(%rax)
4654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4655 ; SSE-NEXT: movaps %xmm0, 864(%rax)
4656 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4657 ; SSE-NEXT: movaps %xmm0, 848(%rax)
4658 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4659 ; SSE-NEXT: movaps %xmm0, 832(%rax)
4660 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4661 ; SSE-NEXT: movaps %xmm0, 816(%rax)
4662 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4663 ; SSE-NEXT: movaps %xmm0, 800(%rax)
4664 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4665 ; SSE-NEXT: movaps %xmm0, 784(%rax)
4666 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4667 ; SSE-NEXT: movaps %xmm0, 768(%rax)
4668 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4669 ; SSE-NEXT: movaps %xmm0, 752(%rax)
4670 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4671 ; SSE-NEXT: movaps %xmm0, 736(%rax)
4672 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4673 ; SSE-NEXT: movaps %xmm0, 720(%rax)
4674 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4675 ; SSE-NEXT: movaps %xmm0, 704(%rax)
4676 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4677 ; SSE-NEXT: movaps %xmm0, 688(%rax)
4678 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4679 ; SSE-NEXT: movaps %xmm0, 672(%rax)
4680 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4681 ; SSE-NEXT: movaps %xmm0, 656(%rax)
4682 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4683 ; SSE-NEXT: movaps %xmm0, 640(%rax)
4684 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4685 ; SSE-NEXT: movaps %xmm0, 624(%rax)
4686 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4687 ; SSE-NEXT: movaps %xmm0, 608(%rax)
4688 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4689 ; SSE-NEXT: movaps %xmm0, 592(%rax)
4690 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4691 ; SSE-NEXT: movaps %xmm0, 576(%rax)
4692 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4693 ; SSE-NEXT: movaps %xmm0, 560(%rax)
4694 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4695 ; SSE-NEXT: movaps %xmm0, 544(%rax)
4696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4697 ; SSE-NEXT: movaps %xmm0, 528(%rax)
4698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4699 ; SSE-NEXT: movaps %xmm0, 512(%rax)
4700 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4701 ; SSE-NEXT: movaps %xmm0, 496(%rax)
4702 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4703 ; SSE-NEXT: movaps %xmm0, 480(%rax)
4704 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4705 ; SSE-NEXT: movaps %xmm0, 464(%rax)
4706 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4707 ; SSE-NEXT: movaps %xmm0, 448(%rax)
4708 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4709 ; SSE-NEXT: movaps %xmm0, 432(%rax)
4710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4711 ; SSE-NEXT: movaps %xmm0, 416(%rax)
4712 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4713 ; SSE-NEXT: movaps %xmm0, 400(%rax)
4714 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4715 ; SSE-NEXT: movaps %xmm0, 384(%rax)
4716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4717 ; SSE-NEXT: movaps %xmm0, 368(%rax)
4718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4719 ; SSE-NEXT: movaps %xmm0, 352(%rax)
4720 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4721 ; SSE-NEXT: movaps %xmm0, 336(%rax)
4722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4723 ; SSE-NEXT: movaps %xmm0, 320(%rax)
4724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4725 ; SSE-NEXT: movaps %xmm0, 304(%rax)
4726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4727 ; SSE-NEXT: movaps %xmm0, 288(%rax)
4728 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4729 ; SSE-NEXT: movaps %xmm0, 272(%rax)
4730 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4731 ; SSE-NEXT: movaps %xmm0, 256(%rax)
4732 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4733 ; SSE-NEXT: movaps %xmm0, 240(%rax)
4734 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4735 ; SSE-NEXT: movaps %xmm0, 224(%rax)
4736 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4737 ; SSE-NEXT: movaps %xmm0, 208(%rax)
4738 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4739 ; SSE-NEXT: movaps %xmm0, 192(%rax)
4740 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4741 ; SSE-NEXT: movaps %xmm0, 176(%rax)
4742 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4743 ; SSE-NEXT: movaps %xmm0, 160(%rax)
4744 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4745 ; SSE-NEXT: movaps %xmm0, 144(%rax)
4746 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4747 ; SSE-NEXT: movaps %xmm0, 128(%rax)
4748 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4749 ; SSE-NEXT: movaps %xmm0, 112(%rax)
4750 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4751 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4752 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4753 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4754 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4755 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4756 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4757 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4758 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4759 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4760 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4761 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4762 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4763 ; SSE-NEXT: movaps %xmm0, (%rax)
4764 ; SSE-NEXT: addq $1432, %rsp # imm = 0x598
4765 ; SSE-NEXT: retq
4766 ;
4767 ; AVX1-ONLY-LABEL: store_i64_stride7_vf32:
4768 ; AVX1-ONLY: # %bb.0:
4769 ; AVX1-ONLY-NEXT: subq $1320, %rsp # imm = 0x528
4770 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4771 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm3
4772 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm1
4773 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm2
4774 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm5
4775 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm4
4776 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm6
4777 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm7
4778 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm8 = xmm7[0],xmm6[0]
4779 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm12
4780 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm8, %ymm9
4781 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm10
4782 ; AVX1-ONLY-NEXT: vmovaps 16(%rax), %xmm11
4783 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm0
4784 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
4785 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1],ymm9[2,3],ymm8[4,5],ymm9[6,7]
4786 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4787 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm8
4788 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm2[0,1,2,3],ymm8[4,5,6,7]
4789 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm9
4790 ; AVX1-ONLY-NEXT: vmovaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4791 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm9 = xmm9[2,3,2,3]
4792 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3,4,5,6,7]
4793 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
4794 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3,4,5],ymm7[6,7]
4795 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4796 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm3[0],ymm1[0],ymm3[2],ymm1[2]
4797 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm10[1]
4798 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
4799 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4800 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm4[0],ymm5[2],ymm4[2]
4801 ; AVX1-ONLY-NEXT: vmovaps 16(%rcx), %xmm5
4802 ; AVX1-ONLY-NEXT: vmovaps 16(%rdx), %xmm6
4803 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm5[0]
4804 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
4805 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4806 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm3
4807 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm3[1],ymm1[3],ymm3[3]
4808 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
4809 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm11[0,1],ymm1[2,3,4,5,6,7]
4810 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4811 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm5[2,3,2,3]
4812 ; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm2
4813 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm2[1],ymm4[3],ymm2[3]
4814 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],mem[6,7]
4815 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3,4,5,6,7]
4816 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4817 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm1
4818 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm2
4819 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm1[0]
4820 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rcx), %ymm3, %ymm4
4821 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm5
4822 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
4823 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2],ymm4[2]
4824 ; AVX1-ONLY-NEXT: vmovupd %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4825 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
4826 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm3
4827 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm4
4828 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm4[0],xmm3[0]
4829 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm6, %ymm6
4830 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm6[0,1,2,3,4,5],ymm2[6,7]
4831 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4832 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm2
4833 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm5[1]
4834 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm5
4835 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm5[4,5,6,7]
4836 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm5
4837 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
4838 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
4839 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4840 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm1
4841 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm3[1],xmm0[1]
4842 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm3
4843 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],ymm5[0],ymm2[2],ymm5[2]
4844 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
4845 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4846 ; AVX1-ONLY-NEXT: vmovaps 48(%rcx), %xmm0
4847 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[2]
4848 ; AVX1-ONLY-NEXT: vmovaps 48(%rdx), %xmm2
4849 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4850 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
4851 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4852 ; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm1
4853 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm5[1],ymm1[1],ymm5[3],ymm1[3]
4854 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
4855 ; AVX1-ONLY-NEXT: vmovaps 48(%rax), %xmm2
4856 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
4857 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4858 ; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm1
4859 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm3[1],ymm1[1],ymm3[3],ymm1[3]
4860 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
4861 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
4862 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
4863 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4864 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm0
4865 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm1
4866 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
4867 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm3
4868 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4869 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm3
4870 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm4
4871 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
4872 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
4873 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4874 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %ymm2
4875 ; AVX1-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm3
4876 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
4877 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm11
4878 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm11[2,3,2,3]
4879 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3,4,5,6,7]
4880 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
4881 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
4882 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4883 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
4884 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %ymm1
4885 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %ymm3
4886 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[2]
4887 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4888 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4889 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %ymm0
4890 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %ymm1
4891 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4892 ; AVX1-ONLY-NEXT: vmovaps 80(%rcx), %xmm4
4893 ; AVX1-ONLY-NEXT: vmovaps 80(%rdx), %xmm5
4894 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
4895 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
4896 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4897 ; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm0
4898 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
4899 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
4900 ; AVX1-ONLY-NEXT: vmovaps 80(%rax), %xmm2
4901 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
4902 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4903 ; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm0
4904 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
4905 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
4906 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
4907 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
4908 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4909 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm0
4910 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
4911 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
4912 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rcx), %ymm2, %ymm3
4913 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm4
4914 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
4915 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
4916 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4917 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
4918 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm2
4919 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm3
4920 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm3[0],xmm2[0]
4921 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm6
4922 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
4923 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1,2,3,4,5],ymm1[6,7]
4924 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4925 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
4926 ; AVX1-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm1
4927 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4928 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
4929 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4930 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4931 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm2[1],xmm6[1]
4932 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %ymm1
4933 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %ymm2
4934 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
4935 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4936 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4937 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %ymm0
4938 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %ymm1
4939 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4940 ; AVX1-ONLY-NEXT: vmovaps 112(%rcx), %xmm3
4941 ; AVX1-ONLY-NEXT: vmovaps 112(%rdx), %xmm4
4942 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
4943 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
4944 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4945 ; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm0
4946 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
4947 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
4948 ; AVX1-ONLY-NEXT: vmovaps 112(%rax), %xmm2
4949 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
4950 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4951 ; AVX1-ONLY-NEXT: vmovaps 112(%r8), %xmm0
4952 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
4953 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm3[2,3,2,3]
4954 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
4955 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
4956 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4957 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %xmm0
4958 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %xmm1
4959 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
4960 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm10
4961 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm2, %ymm3
4962 ; AVX1-ONLY-NEXT: vmovaps 128(%rax), %xmm4
4963 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
4964 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
4965 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4966 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %ymm2
4967 ; AVX1-ONLY-NEXT: vbroadcastsd 136(%rcx), %ymm3
4968 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1,2,3],ymm3[4,5,6,7]
4969 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm13
4970 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm13[2,3,2,3]
4971 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3,4,5,6,7]
4972 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
4973 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
4974 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4975 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
4976 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %ymm1
4977 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %ymm3
4978 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[2],ymm3[2]
4979 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4980 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4981 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %ymm0
4982 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %ymm1
4983 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
4984 ; AVX1-ONLY-NEXT: vmovaps 144(%rcx), %xmm4
4985 ; AVX1-ONLY-NEXT: vmovaps 144(%rdx), %xmm5
4986 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm5[0],xmm4[0]
4987 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
4988 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4989 ; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm0
4990 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm3[1],ymm0[1],ymm3[3],ymm0[3]
4991 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
4992 ; AVX1-ONLY-NEXT: vmovaps 144(%rax), %xmm2
4993 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
4994 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4995 ; AVX1-ONLY-NEXT: vmovaps 144(%r8), %xmm0
4996 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
4997 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
4998 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
4999 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5000 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5001 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm0
5002 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm1
5003 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
5004 ; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%rcx), %ymm2, %ymm3
5005 ; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %xmm4
5006 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
5007 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
5008 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5009 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5010 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %xmm2
5011 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %xmm3
5012 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm3[0],xmm2[0]
5013 ; AVX1-ONLY-NEXT: vmovaps 160(%rax), %xmm6
5014 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
5015 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1,2,3,4,5],ymm1[6,7]
5016 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5017 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
5018 ; AVX1-ONLY-NEXT: vbroadcastsd 168(%rcx), %ymm1
5019 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5020 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
5021 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
5022 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5023 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm2[1],xmm6[1]
5024 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %ymm1
5025 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %ymm2
5026 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
5027 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5028 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5029 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %ymm0
5030 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %ymm1
5031 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5032 ; AVX1-ONLY-NEXT: vmovaps 176(%rcx), %xmm3
5033 ; AVX1-ONLY-NEXT: vmovaps 176(%rdx), %xmm4
5034 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
5035 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
5036 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5037 ; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm0
5038 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
5039 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
5040 ; AVX1-ONLY-NEXT: vmovaps 176(%rax), %xmm2
5041 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
5042 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5043 ; AVX1-ONLY-NEXT: vmovaps 176(%r8), %xmm0
5044 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
5045 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm3[2,3,2,3]
5046 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
5047 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5048 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5049 ; AVX1-ONLY-NEXT: vmovaps 192(%r9), %xmm0
5050 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %xmm1
5051 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
5052 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm9
5053 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm2, %ymm3
5054 ; AVX1-ONLY-NEXT: vmovaps 192(%rax), %xmm5
5055 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm2, %ymm2
5056 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
5057 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5058 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %ymm2
5059 ; AVX1-ONLY-NEXT: vbroadcastsd 200(%rcx), %ymm3
5060 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5061 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm8
5062 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm8[2,3,2,3]
5063 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3,4,5,6,7]
5064 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5065 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm6[0,1,2,3,4,5],ymm1[6,7]
5066 ; AVX1-ONLY-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
5067 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm5[1]
5068 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %ymm1
5069 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %ymm5
5070 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm5[0],ymm1[2],ymm5[2]
5071 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5072 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5073 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %ymm0
5074 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
5075 ; AVX1-ONLY-NEXT: vmovaps 208(%rcx), %xmm4
5076 ; AVX1-ONLY-NEXT: vmovaps 208(%rdx), %xmm6
5077 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm4[0]
5078 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
5079 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5080 ; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm0
5081 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm5[1],ymm0[1],ymm5[3],ymm0[3]
5082 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
5083 ; AVX1-ONLY-NEXT: vmovaps 208(%rax), %xmm2
5084 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
5085 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5086 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm2
5087 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm0
5088 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm5 = xmm0[0],xmm2[0]
5089 ; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%rcx), %ymm5, %ymm6
5090 ; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %xmm1
5091 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm5
5092 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm7 = ymm5[0],ymm6[1],ymm5[2],ymm6[2]
5093 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5094 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = mem[0,0]
5095 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1],ymm5[2,3],ymm0[4,5,6,7]
5096 ; AVX1-ONLY-NEXT: vmovapd 224(%r8), %ymm3
5097 ; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%rax), %ymm3, %ymm6
5098 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
5099 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm2[2,3,2,3]
5100 ; AVX1-ONLY-NEXT: vbroadcastsd 232(%rcx), %ymm5
5101 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm5[4,5,6,7]
5102 ; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%r8), %ymm1, %ymm1
5103 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
5104 ; AVX1-ONLY-NEXT: vmovapd 224(%rdi), %ymm1
5105 ; AVX1-ONLY-NEXT: vmovapd 224(%rsi), %ymm2
5106 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
5107 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm15 = mem[0,0]
5108 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm15[0,1],ymm1[2,3]
5109 ; AVX1-ONLY-NEXT: vmovapd 240(%rdi), %xmm15
5110 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm15 = ymm15[0,1],mem[2,3]
5111 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0,0,3,2]
5112 ; AVX1-ONLY-NEXT: vmovapd 224(%rax), %ymm0
5113 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm0[2,3],ymm2[2,3]
5114 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm15[1],ymm2[2],ymm15[3]
5115 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],mem[1]
5116 ; AVX1-ONLY-NEXT: vbroadcastsd 216(%r9), %ymm15
5117 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm15[4,5,6,7]
5118 ; AVX1-ONLY-NEXT: vmovapd 240(%rcx), %xmm15
5119 ; AVX1-ONLY-NEXT: vmovapd 240(%rdx), %xmm14
5120 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm14 = xmm14[0],xmm15[0]
5121 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm14[0,1],ymm3[2,3]
5122 ; AVX1-ONLY-NEXT: vbroadcastsd 240(%r9), %ymm14
5123 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm3 = ymm3[0,1,2],ymm14[3]
5124 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm14 = xmm15[1],mem[1]
5125 ; AVX1-ONLY-NEXT: vbroadcastsd 248(%r9), %ymm15
5126 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm14 = ymm14[0,1],ymm15[2,3]
5127 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0],ymm0[1],ymm1[2,3]
5128 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm14[0,1,2],ymm0[3]
5129 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm13 = xmm10[0],xmm13[0]
5130 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
5131 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm14 = xmm10[0],xmm11[0]
5132 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm8 = xmm9[0],xmm8[0]
5133 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm12, %xmm9 # 16-byte Folded Reload
5134 ; AVX1-ONLY-NEXT: # xmm9 = xmm12[0],mem[0]
5135 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rsi
5136 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],mem[6,7]
5137 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %xmm15
5138 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm15 = xmm15[0],mem[0]
5139 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm12
5140 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm12 = xmm12[0],mem[0]
5141 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %xmm11
5142 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm11 = xmm11[0],mem[0]
5143 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm10
5144 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm10 = xmm10[0],mem[0]
5145 ; AVX1-ONLY-NEXT: vmovaps %xmm10, 16(%rsi)
5146 ; AVX1-ONLY-NEXT: vmovaps %xmm9, (%rsi)
5147 ; AVX1-ONLY-NEXT: vmovaps %xmm11, 1360(%rsi)
5148 ; AVX1-ONLY-NEXT: vmovaps %xmm8, 1344(%rsi)
5149 ; AVX1-ONLY-NEXT: vmovaps %xmm12, 464(%rsi)
5150 ; AVX1-ONLY-NEXT: vmovaps %xmm14, 448(%rsi)
5151 ; AVX1-ONLY-NEXT: vmovaps %xmm15, 912(%rsi)
5152 ; AVX1-ONLY-NEXT: vmovaps %xmm13, 896(%rsi)
5153 ; AVX1-ONLY-NEXT: vmovapd %ymm0, 1760(%rsi)
5154 ; AVX1-ONLY-NEXT: vmovapd %ymm2, 1728(%rsi)
5155 ; AVX1-ONLY-NEXT: vmovapd %ymm3, 1696(%rsi)
5156 ; AVX1-ONLY-NEXT: vmovapd %ymm1, 1664(%rsi)
5157 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 1632(%rsi)
5158 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 1600(%rsi)
5159 ; AVX1-ONLY-NEXT: vmovapd %ymm7, 1568(%rsi)
5160 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 1536(%rsi)
5161 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5162 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rsi)
5163 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5164 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rsi)
5165 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5166 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rsi)
5167 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5168 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rsi)
5169 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5170 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rsi)
5171 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5172 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rsi)
5173 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5174 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rsi)
5175 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5176 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rsi)
5177 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5178 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rsi)
5179 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5180 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rsi)
5181 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5182 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1152(%rsi)
5183 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5184 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rsi)
5185 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5186 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rsi)
5187 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5188 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rsi)
5189 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5190 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rsi)
5191 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5192 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rsi)
5193 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5194 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 960(%rsi)
5195 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5196 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rsi)
5197 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5198 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rsi)
5199 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5200 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rsi)
5201 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5202 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rsi)
5203 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5204 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rsi)
5205 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5206 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rsi)
5207 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5208 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rsi)
5209 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5210 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rsi)
5211 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5212 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rsi)
5213 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5214 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rsi)
5215 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5216 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rsi)
5217 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5218 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rsi)
5219 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5220 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rsi)
5221 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5222 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rsi)
5223 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5224 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rsi)
5225 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5226 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rsi)
5227 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5228 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rsi)
5229 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5230 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rsi)
5231 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5232 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rsi)
5233 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5234 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rsi)
5235 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5236 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rsi)
5237 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5238 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rsi)
5239 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5240 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rsi)
5241 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5242 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rsi)
5243 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5244 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rsi)
5245 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5246 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rsi)
5247 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5248 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rsi)
5249 ; AVX1-ONLY-NEXT: addq $1320, %rsp # imm = 0x528
5250 ; AVX1-ONLY-NEXT: vzeroupper
5251 ; AVX1-ONLY-NEXT: retq
5252 ;
5253 ; AVX2-ONLY-LABEL: store_i64_stride7_vf32:
5254 ; AVX2-ONLY: # %bb.0:
5255 ; AVX2-ONLY-NEXT: subq $1672, %rsp # imm = 0x688
5256 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5257 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
5258 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
5259 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm2
5260 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm13
5261 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm9
5262 ; AVX2-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5263 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm3
5264 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm3, %ymm3
5265 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm4
5266 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5267 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm7
5268 ; AVX2-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5269 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm4, %ymm4
5270 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
5271 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5272 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
5273 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm4
5274 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
5275 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm3[0,1],ymm4[2,3,4,5,6,7]
5276 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm5
5277 ; AVX2-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5278 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm3
5279 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5280 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
5281 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5282 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
5283 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm8
5284 ; AVX2-ONLY-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5285 ; AVX2-ONLY-NEXT: vmovaps 16(%rax), %xmm5
5286 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
5287 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm6 = xmm6[1],xmm8[1]
5288 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1,2,3],ymm4[4,5,6,7]
5289 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5290 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm13[0],ymm9[0],ymm13[2],ymm9[2]
5291 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],mem[0],ymm2[2],mem[2]
5292 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm6[2,3],ymm4[2,3]
5293 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5294 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm4
5295 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
5296 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
5297 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
5298 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0,1],ymm0[2,3,4,5,6,7]
5299 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5300 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm0
5301 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm3[0],mem[0]
5302 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
5303 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5304 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5305 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
5306 ; AVX2-ONLY-NEXT: vmovaps %xmm1, (%rsp) # 16-byte Spill
5307 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
5308 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
5309 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm1
5310 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5311 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm1
5312 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
5313 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5314 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
5315 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
5316 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm1
5317 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm2
5318 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
5319 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
5320 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5321 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm0
5322 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm4
5323 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5324 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm3
5325 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5326 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
5327 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
5328 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
5329 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5330 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5331 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5332 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5333 ; AVX2-ONLY-NEXT: vmovaps 48(%rax), %xmm1
5334 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5335 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5336 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm0
5337 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rcx), %ymm0, %ymm0
5338 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm1
5339 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5340 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm1, %ymm1
5341 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5342 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5343 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm0
5344 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm1
5345 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5346 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
5347 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
5348 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm2
5349 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5350 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
5351 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5352 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5353 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm2
5354 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5355 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5356 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5357 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm2
5358 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm3
5359 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
5360 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
5361 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5362 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm4
5363 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5364 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm1
5365 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5366 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
5367 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
5368 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
5369 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5370 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
5371 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5372 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5373 ; AVX2-ONLY-NEXT: vmovaps 80(%rax), %xmm1
5374 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5375 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5376 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm0
5377 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
5378 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm2
5379 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5380 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm3
5381 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5382 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5383 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
5384 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5385 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm3
5386 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5387 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5388 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
5389 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm3
5390 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
5391 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5392 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5393 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5394 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
5395 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
5396 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm1
5397 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm2
5398 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
5399 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
5400 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5401 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm0
5402 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm4
5403 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5404 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm3
5405 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5406 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
5407 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
5408 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
5409 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5410 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5411 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5412 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5413 ; AVX2-ONLY-NEXT: vmovaps 112(%rax), %xmm1
5414 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5415 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5416 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm0
5417 ; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rcx), %ymm0, %ymm0
5418 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm1
5419 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5420 ; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm1, %ymm1
5421 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5422 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5423 ; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %ymm0
5424 ; AVX2-ONLY-NEXT: vbroadcastsd 136(%rcx), %ymm1
5425 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5426 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
5427 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
5428 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %xmm2
5429 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5430 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
5431 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5432 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5433 ; AVX2-ONLY-NEXT: vmovaps 128(%rax), %xmm2
5434 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5435 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5436 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
5437 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm2
5438 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm3
5439 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
5440 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
5441 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5442 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm15
5443 ; AVX2-ONLY-NEXT: vmovaps 128(%r9), %ymm1
5444 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5445 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm15[0],ymm1[0],ymm15[2],ymm1[2]
5446 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
5447 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
5448 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5449 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
5450 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5451 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5452 ; AVX2-ONLY-NEXT: vmovaps 144(%rax), %xmm1
5453 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5454 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5455 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %xmm0
5456 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
5457 ; AVX2-ONLY-NEXT: vmovaps 160(%rax), %xmm2
5458 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5459 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm12
5460 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm3
5461 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
5462 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5463 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %xmm11
5464 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5465 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm11[1]
5466 ; AVX2-ONLY-NEXT: vbroadcastsd 168(%rcx), %ymm3
5467 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
5468 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5469 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5470 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5471 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
5472 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
5473 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm1
5474 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm2
5475 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
5476 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
5477 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5478 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %ymm0
5479 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm9
5480 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %ymm8
5481 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm8[0],ymm9[2],ymm8[2]
5482 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
5483 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
5484 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5485 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5486 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5487 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5488 ; AVX2-ONLY-NEXT: vmovaps 176(%rax), %xmm1
5489 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5490 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5491 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm0
5492 ; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rcx), %ymm0, %ymm0
5493 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm7
5494 ; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm7, %ymm1
5495 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
5496 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5497 ; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %ymm0
5498 ; AVX2-ONLY-NEXT: vbroadcastsd 200(%rcx), %ymm1
5499 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5500 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
5501 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
5502 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %xmm5
5503 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm2
5504 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5505 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5506 ; AVX2-ONLY-NEXT: vmovaps 192(%rax), %xmm4
5507 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5508 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm4[1]
5509 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm2
5510 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm3
5511 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
5512 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm6[4,5,6,7]
5513 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5514 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm1
5515 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],mem[0],ymm1[2],mem[2]
5516 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm0[0],mem[0],ymm0[2],mem[2]
5517 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm6[2,3],ymm1[2,3]
5518 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5519 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
5520 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
5521 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5522 ; AVX2-ONLY-NEXT: vmovaps 208(%rax), %xmm1
5523 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
5524 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5525 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
5526 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm2
5527 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
5528 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm2[0,1],ymm0[0,1]
5529 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5,6,7]
5530 ; AVX2-ONLY-NEXT: vbroadcastsd 224(%rax), %ymm3
5531 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5],ymm1[6,7]
5532 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5533 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %xmm1
5534 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
5535 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
5536 ; AVX2-ONLY-NEXT: vbroadcastsd 232(%rcx), %ymm6
5537 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
5538 ; AVX2-ONLY-NEXT: vinsertf128 $1, 224(%r8), %ymm0, %ymm6
5539 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm6[6,7]
5540 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5541 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm3
5542 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
5543 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
5544 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm10[4,5,6,7]
5545 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5546 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm3[1],ymm0[3],ymm3[3]
5547 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm0[0,2,3,3]
5548 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %ymm0
5549 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm0[6,7]
5550 ; AVX2-ONLY-NEXT: vmovaps 240(%rax), %xmm6
5551 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm6[0,1],ymm3[2,3,4,5,6,7]
5552 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5553 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],mem[0]
5554 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 16-byte Folded Reload
5555 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 16-byte Folded Reload
5556 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5],ymm3[6,7]
5557 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5558 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm3 # 32-byte Folded Reload
5559 ; AVX2-ONLY-NEXT: # ymm3 = ymm13[1],mem[1],ymm13[3],mem[3]
5560 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,3,3]
5561 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
5562 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%rcx), %ymm6
5563 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1],ymm3[2,3,4,5,6,7]
5564 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5565 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5566 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],mem[0]
5567 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rsp), %ymm3, %ymm3 # 16-byte Folded Reload
5568 ; AVX2-ONLY-NEXT: vbroadcastsd 32(%rcx), %ymm6
5569 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm6[6,7]
5570 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5571 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5572 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
5573 ; AVX2-ONLY-NEXT: # ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
5574 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,3,3]
5575 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
5576 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%rcx), %ymm6
5577 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1],ymm3[2,3,4,5,6,7]
5578 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5579 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5580 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],mem[0]
5581 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 16-byte Folded Reload
5582 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 16-byte Folded Reload
5583 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5],ymm3[6,7]
5584 ; AVX2-ONLY-NEXT: vmovups %ymm3, (%rsp) # 32-byte Spill
5585 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5586 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
5587 ; AVX2-ONLY-NEXT: # ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
5588 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,3,3]
5589 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
5590 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%rcx), %ymm6
5591 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1],ymm3[2,3,4,5,6,7]
5592 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5593 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5594 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],mem[0]
5595 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 16-byte Folded Reload
5596 ; AVX2-ONLY-NEXT: vbroadcastsd 96(%rcx), %ymm6
5597 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm3[0,1,2,3,4,5],ymm6[6,7]
5598 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
5599 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
5600 ; AVX2-ONLY-NEXT: # ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
5601 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,3,3]
5602 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
5603 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%rcx), %ymm6
5604 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm6[0,1],ymm3[2,3,4,5,6,7]
5605 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5606 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],mem[0]
5607 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 16-byte Folded Reload
5608 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 16-byte Folded Reload
5609 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm3[0,1,2,3],ymm6[4,5],ymm3[6,7]
5610 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm3 # 32-byte Folded Reload
5611 ; AVX2-ONLY-NEXT: # ymm3 = ymm15[1],mem[1],ymm15[3],mem[3]
5612 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,2,3,3]
5613 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],mem[6,7]
5614 ; AVX2-ONLY-NEXT: vbroadcastsd 152(%rcx), %ymm15
5615 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm15[0,1],ymm3[2,3,4,5,6,7]
5616 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm12 = xmm12[0],mem[0]
5617 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
5618 ; AVX2-ONLY-NEXT: vbroadcastsd 160(%rcx), %ymm12
5619 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5],ymm12[6,7]
5620 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm9[1],ymm8[1],ymm9[3],ymm8[3]
5621 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,2,3,3]
5622 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
5623 ; AVX2-ONLY-NEXT: vbroadcastsd 184(%rcx), %ymm9
5624 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3,4,5,6,7]
5625 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm5 = xmm5[0],mem[0]
5626 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm5, %ymm5
5627 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm4, %ymm4
5628 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3],ymm4[4,5],ymm5[6,7]
5629 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%rcx), %ymm5
5630 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],mem[2,3]
5631 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%r9), %ymm7
5632 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
5633 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm7
5634 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm7 = xmm7[0],mem[0]
5635 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm1
5636 ; AVX2-ONLY-NEXT: vbroadcastsd 224(%rcx), %ymm7
5637 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm7[6,7]
5638 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
5639 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm0[2,3],ymm2[2,3]
5640 ; AVX2-ONLY-NEXT: vbroadcastsd 240(%r9), %ymm2
5641 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
5642 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%rcx), %ymm2
5643 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],mem[2,3]
5644 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%r9), %ymm7
5645 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm7[4,5,6,7]
5646 ; AVX2-ONLY-NEXT: vmovaps 224(%rax), %ymm7
5647 ; AVX2-ONLY-NEXT: vblendps $243, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm9 # 32-byte Folded Reload
5648 ; AVX2-ONLY-NEXT: # ymm9 = mem[0,1],ymm7[2,3],mem[4,5,6,7]
5649 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm7[6,7]
5650 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rcx
5651 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],mem[6,7]
5652 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 1760(%rcx)
5653 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 1728(%rcx)
5654 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1696(%rcx)
5655 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 1664(%rcx)
5656 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5657 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1632(%rcx)
5658 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5659 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1600(%rcx)
5660 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 1568(%rcx)
5661 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 1536(%rcx)
5662 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5663 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rcx)
5664 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5665 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1472(%rcx)
5666 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5667 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rcx)
5668 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5669 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rcx)
5670 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 1376(%rcx)
5671 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5672 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rcx)
5673 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 1312(%rcx)
5674 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5675 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rcx)
5676 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5677 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1248(%rcx)
5678 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5679 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rcx)
5680 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5681 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rcx)
5682 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5683 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rcx)
5684 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 1120(%rcx)
5685 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 1088(%rcx)
5686 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5687 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rcx)
5688 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5689 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rcx)
5690 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5691 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rcx)
5692 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5693 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rcx)
5694 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 928(%rcx)
5695 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5696 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rcx)
5697 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 864(%rcx)
5698 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5699 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rcx)
5700 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5701 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rcx)
5702 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5703 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rcx)
5704 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5705 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rcx)
5706 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5707 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rcx)
5708 ; AVX2-ONLY-NEXT: vmovaps %ymm13, 672(%rcx)
5709 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5710 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rcx)
5711 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5712 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rcx)
5713 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5714 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rcx)
5715 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5716 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rcx)
5717 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5718 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rcx)
5719 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5720 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rcx)
5721 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5722 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rcx)
5723 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5724 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rcx)
5725 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5726 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rcx)
5727 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5728 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rcx)
5729 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5730 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rcx)
5731 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5732 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rcx)
5733 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5734 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rcx)
5735 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5736 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rcx)
5737 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5738 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rcx)
5739 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5740 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rcx)
5741 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5742 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rcx)
5743 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5744 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rcx)
5745 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5746 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rcx)
5747 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5748 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rcx)
5749 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5750 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rcx)
5751 ; AVX2-ONLY-NEXT: addq $1672, %rsp # imm = 0x688
5752 ; AVX2-ONLY-NEXT: vzeroupper
5753 ; AVX2-ONLY-NEXT: retq
5754 ;
5755 ; AVX512F-ONLY-SLOW-LABEL: store_i64_stride7_vf32:
5756 ; AVX512F-ONLY-SLOW: # %bb.0:
5757 ; AVX512F-ONLY-SLOW-NEXT: subq $2120, %rsp # imm = 0x848
5758 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5759 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
5760 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm11
5761 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm27
5762 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
5763 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm18
5764 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm21
5765 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm7
5766 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm25
5767 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm0
5768 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm4
5769 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm1
5770 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm5
5771 ; AVX512F-ONLY-SLOW-NEXT: movb $96, %r10b
5772 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k1
5773 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5774 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [9,1,9,1,9,1,9,1]
5775 ; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
5776 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
5777 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm6
5778 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14
5779 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [4,9,0,3,4,9,0,3]
5780 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
5781 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
5782 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm6
5783 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5784 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm15
5785 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [11,3,11,3,11,3,11,3]
5786 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
5787 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm1
5788 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm1
5789 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
5790 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [2,10,0,3,2,10,0,3]
5791 ; AVX512F-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
5792 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm3
5793 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm6, %zmm3
5794 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm16
5795 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
5796 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm10
5797 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm10, (%rsp) # 32-byte Spill
5798 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm6
5799 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5800 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm1
5801 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5802 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm12
5803 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5804 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm10[0],ymm1[2],ymm10[2]
5805 ; AVX512F-ONLY-SLOW-NEXT: movb $28, %r10b
5806 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k2
5807 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm1[2,3,2,3],zmm2[2,3,2,3]
5808 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5809 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [4,12,0,5,4,12,0,5]
5810 ; AVX512F-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
5811 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
5812 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm3
5813 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10
5814 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
5815 ; AVX512F-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
5816 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm3
5817 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5818 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [5,0,14,6,5,0,14,6]
5819 ; AVX512F-ONLY-SLOW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
5820 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm19
5821 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm20
5822 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5823 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm22, %zmm19
5824 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,13,6,7,0,13,6,7]
5825 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
5826 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm19
5827 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5828 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm19
5829 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [15,7,15,7,15,7,15,7]
5830 ; AVX512F-ONLY-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
5831 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm4, %zmm0
5832 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [6,13,14,7,6,13,14,7]
5833 ; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
5834 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm19
5835 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5836 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
5837 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5838 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm17, %zmm0
5839 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm19
5840 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm16, %zmm19
5841 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
5842 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm12[0],ymm6[0],ymm12[2],ymm6[2]
5843 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 {%k2} = zmm0[2,3,2,3],zmm5[2,3,2,3]
5844 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5845 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm0
5846 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm12
5847 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
5848 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5849 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm6
5850 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5851 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm15, %zmm6
5852 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5853 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
5854 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm10, %zmm6
5855 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm6
5856 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5857 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6
5858 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5859 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm22, %zmm6
5860 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm6
5861 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5862 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5863 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm0
5864 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm5
5865 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5866 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm6
5867 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
5868 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
5869 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm0
5870 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
5871 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm28
5872 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm12
5873 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm16, %zmm12
5874 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm17
5875 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
5876 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm29
5877 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %ymm16
5878 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %ymm16, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5879 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%r8), %ymm0
5880 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5881 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm16[0],ymm0[2],ymm16[2]
5882 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm12 {%k2} = zmm0[2,3,2,3],zmm29[2,3,2,3]
5883 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5884 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm0
5885 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm20
5886 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm0, %zmm10
5887 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm10
5888 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5889 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm20, %zmm22
5890 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm3, %zmm22
5891 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5892 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1
5893 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm1
5894 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm1
5895 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5896 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5897 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm4, %zmm0
5898 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm29
5899 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [13,5,13,5,13,5,13,5]
5900 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
5901 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
5902 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm3, %zmm0
5903 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5904 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [6,14,6,14,6,14,6,14]
5905 ; AVX512F-ONLY-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
5906 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
5907 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm24, %zmm0
5908 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5909 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
5910 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm4, %zmm0
5911 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5912 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
5913 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
5914 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm7
5915 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
5916 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm3, %zmm1
5917 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5918 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
5919 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm24, %zmm1
5920 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5921 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
5922 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
5923 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5924 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm25
5925 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm18
5926 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm1
5927 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm2
5928 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm24, %zmm2
5929 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5930 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26
5931 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm3, %zmm26
5932 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm28, %zmm5, %zmm24
5933 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm30
5934 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm4, %zmm30
5935 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm0, %zmm28
5936 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm21
5937 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm21
5938 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm18, %zmm17
5939 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5940 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm0
5941 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm18
5942 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm0
5943 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5944 ; AVX512F-ONLY-SLOW-NEXT: movb $48, %r10b
5945 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k3
5946 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
5947 ; AVX512F-ONLY-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
5948 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm15
5949 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm4, %zmm15
5950 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k3} = zmm8[0],zmm27[0],zmm8[2],zmm27[2],zmm8[4],zmm27[4],zmm8[6],zmm27[6]
5951 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5952 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm7
5953 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm14
5954 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm16
5955 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
5956 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,8,0,1,0,8,0,1]
5957 ; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
5958 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm17
5959 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm7
5960 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
5961 ; AVX512F-ONLY-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
5962 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm14
5963 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [15,7,15,7]
5964 ; AVX512F-ONLY-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
5965 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm16
5966 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm27
5967 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm4, %zmm27
5968 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm25 {%k3} = zmm11[0],zmm13[0],zmm11[2],zmm13[2],zmm11[4],zmm13[4],zmm11[6],zmm13[6]
5969 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
5970 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12
5971 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm31
5972 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm11
5973 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm2
5974 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm12
5975 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm31
5976 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm22
5977 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm0
5978 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm23
5979 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm23
5980 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13
5981 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
5982 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm8
5983 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm4, %zmm8
5984 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm6, %zmm3
5985 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm6, %zmm1
5986 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm28 {%k3} = zmm6[0],zmm19[0],zmm6[2],zmm19[2],zmm6[4],zmm19[4],zmm6[6],zmm19[6]
5987 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm9
5988 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm5, %zmm6
5989 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm9
5990 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm22[0],zmm0[0],zmm22[2],zmm0[2],zmm22[4],zmm0[4],zmm22[6],zmm0[6]
5991 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm22, %zmm5
5992 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm4
5993 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
5994 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm22, %zmm19
5995 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5996 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm22
5997 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm0
5998 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
5999 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6000 ; AVX512F-ONLY-SLOW-NEXT: movb $12, %sil
6001 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
6002 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k3}
6003 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
6004 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6005 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
6006 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm10 # 64-byte Folded Reload
6007 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm10, %zmm10
6008 ; AVX512F-ONLY-SLOW-NEXT: movb $112, %sil
6009 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
6010 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm17 {%k4}
6011 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
6012 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
6013 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
6014 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm11 {%k3}
6015 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
6016 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm10 # 64-byte Folded Reload
6017 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm10, %zmm10
6018 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11 {%k4}
6019 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
6020 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
6021 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
6022 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm6 {%k3}
6023 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
6024 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm10
6025 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 128(%rax), %zmm10, %zmm10
6026 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm6 {%k4}
6027 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
6028 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
6029 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
6030 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm5 {%k3}
6031 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm19
6032 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm20
6033 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm19, %zmm0
6034 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
6035 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
6036 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6037 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
6038 ; AVX512F-ONLY-SLOW-NEXT: movb $120, %sil
6039 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
6040 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6041 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0 {%k3}
6042 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6043 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm14, %zmm7 # 64-byte Folded Reload
6044 ; AVX512F-ONLY-SLOW-NEXT: # zmm7 = zmm14[0,1,2,3],mem[4,5,6,7]
6045 ; AVX512F-ONLY-SLOW-NEXT: movb $-61, %sil
6046 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
6047 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6048 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
6049 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6050 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
6051 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6052 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k3}
6053 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6054 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
6055 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
6056 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm26 {%k3}
6057 ; AVX512F-ONLY-SLOW-NEXT: movb $24, %sil
6058 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
6059 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6060 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k3}
6061 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm3 # 64-byte Folded Reload
6062 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = zmm12[0,1,2,3],mem[4,5,6,7]
6063 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6064 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
6065 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm1[0,1,2,3],zmm24[4,5,6,7]
6066 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6067 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
6068 ; AVX512F-ONLY-SLOW-NEXT: movb $-31, %sil
6069 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
6070 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6071 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k4}
6072 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6073 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k3}
6074 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6075 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
6076 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm9 {%k3}
6077 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9 {%k4}
6078 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm0
6079 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6080 ; AVX512F-ONLY-SLOW-NEXT: movb $6, %sil
6081 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
6082 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k4}
6083 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
6084 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6085 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm27 {%k4}
6086 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm0
6087 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6088 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k4}
6089 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm0
6090 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6091 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k4}
6092 ; AVX512F-ONLY-SLOW-NEXT: movb $56, %cl
6093 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k4
6094 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6095 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k4}
6096 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6097 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k4}
6098 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6099 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k4}
6100 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
6101 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm19, %zmm2
6102 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm0
6103 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
6104 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm2
6105 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k4}
6106 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
6107 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
6108 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
6109 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
6110 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
6111 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm10 # 64-byte Folded Reload
6112 ; AVX512F-ONLY-SLOW-NEXT: # zmm10 = zmm23[0,1,2,3],mem[4,5,6,7]
6113 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,11,u,u,4,5,6,7>
6114 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm21, %zmm12
6115 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,11,u,4,5,6,7>
6116 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm12, %zmm2
6117 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%r8), %ymm12
6118 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm12[0],mem[0],ymm12[2],mem[2]
6119 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm12[2,3,2,3],zmm0[2,3,2,3]
6120 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
6121 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm22 {%k3}
6122 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6123 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq (%rsp), %ymm12, %ymm12 # 32-byte Folded Reload
6124 ; AVX512F-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
6125 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
6126 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
6127 ; AVX512F-ONLY-SLOW-NEXT: movb $14, %cl
6128 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
6129 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
6130 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm18 {%k1}
6131 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6132 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
6133 ; AVX512F-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
6134 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
6135 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
6136 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm25 {%k1}
6137 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6138 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
6139 ; AVX512F-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
6140 ; AVX512F-ONLY-SLOW-NEXT: movb $64, %cl
6141 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k2
6142 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
6143 ; AVX512F-ONLY-SLOW-NEXT: movb $8, %cl
6144 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k2
6145 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
6146 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
6147 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
6148 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm28 {%k1}
6149 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
6150 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm13, %zmm12
6151 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,1,2,3,4,15,u,u>
6152 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm22, %zmm13
6153 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,12,u,3,4,5,6,7>
6154 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm12, %zmm14
6155 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
6156 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm10, %zmm12
6157 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,1,2,3,4,5,15,u>
6158 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm13, %zmm10
6159 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,1,12,3,4,5,6,7]
6160 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm14, %zmm13
6161 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [0,13,2,3,4,5,6,7]
6162 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm12, %zmm14
6163 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,1,2,3,4,5,6,15]
6164 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm10, %zmm12
6165 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6166 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 1472(%rax)
6167 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1408(%rax)
6168 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 1280(%rax)
6169 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1216(%rax)
6170 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
6171 ; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6172 ; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
6173 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 960(%rax)
6174 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 832(%rax)
6175 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 768(%rax)
6176 ; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6177 ; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
6178 ; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6179 ; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
6180 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 512(%rax)
6181 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 384(%rax)
6182 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 320(%rax)
6183 ; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6184 ; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
6185 ; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6186 ; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
6187 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 64(%rax)
6188 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 1344(%rax)
6189 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
6190 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 896(%rax)
6191 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 640(%rax)
6192 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 448(%rax)
6193 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 192(%rax)
6194 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, (%rax)
6195 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 1728(%rax)
6196 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 1664(%rax)
6197 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 1600(%rax)
6198 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1536(%rax)
6199 ; AVX512F-ONLY-SLOW-NEXT: addq $2120, %rsp # imm = 0x848
6200 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
6201 ; AVX512F-ONLY-SLOW-NEXT: retq
6202 ;
6203 ; AVX512F-ONLY-FAST-LABEL: store_i64_stride7_vf32:
6204 ; AVX512F-ONLY-FAST: # %bb.0:
6205 ; AVX512F-ONLY-FAST-NEXT: subq $2024, %rsp # imm = 0x7E8
6206 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6207 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
6208 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm30
6209 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm17
6210 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm15
6211 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm13
6212 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
6213 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm27
6214 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
6215 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm0
6216 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm9
6217 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm4
6218 ; AVX512F-ONLY-FAST-NEXT: movb $96, %r10b
6219 ; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k1
6220 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6221 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [9,1,9,1,9,1,9,1]
6222 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6223 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
6224 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm3
6225 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
6226 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,9,0,3,4,9,0,3]
6227 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
6228 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm2, %zmm3
6229 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6230 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm11
6231 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [11,3,11,3,11,3,11,3]
6232 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6233 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm1
6234 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm2, %zmm1
6235 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14
6236 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [2,10,0,3,2,10,0,3]
6237 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
6238 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
6239 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm2
6240 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16
6241 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
6242 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
6243 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm1
6244 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm6
6245 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%r9), %ymm2
6246 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %ymm19
6247 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm23
6248 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %ymm22
6249 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm19[0],ymm1[0],ymm19[2],ymm1[2]
6250 ; AVX512F-ONLY-FAST-NEXT: movb $28, %r10b
6251 ; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k2
6252 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 {%k2} = zmm3[2,3,2,3],zmm4[2,3,2,3]
6253 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6254 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm21
6255 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [1,3,7,7]
6256 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm1, %ymm3, %ymm19
6257 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm19, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6258 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [4,12,0,5,4,12,0,5]
6259 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
6260 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
6261 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm4, %zmm1
6262 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm19
6263 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,1,12,7,0,1,12,7]
6264 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
6265 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm5, %zmm1
6266 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6267 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [5,0,14,6,5,0,14,6]
6268 ; AVX512F-ONLY-FAST-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
6269 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm1
6270 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6271 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm1
6272 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,13,6,7,0,13,6,7]
6273 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
6274 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
6275 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6276 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [15,7,15,7,15,7,15,7]
6277 ; AVX512F-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6278 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm28, %zmm0
6279 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [6,13,14,7,6,13,14,7]
6280 ; AVX512F-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6281 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm21
6282 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6283 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm23[0],ymm6[0],ymm23[2],ymm6[2]
6284 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm6, %ymm3, %ymm23
6285 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm23, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6286 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm22[0],ymm2[0],ymm22[2],ymm2[2]
6287 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm2, %ymm3, %ymm22
6288 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6289 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm2
6290 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6291 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm14, %zmm2
6292 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
6293 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm16, %zmm3
6294 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
6295 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm18
6296 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm0[2,3,2,3],zmm18[2,3,2,3]
6297 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6298 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm0
6299 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm22
6300 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
6301 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6302 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm2
6303 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm9
6304 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6305 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm11, %zmm2
6306 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6307 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
6308 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm2
6309 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm5, %zmm2
6310 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6311 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm2
6312 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm2
6313 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm4, %zmm2
6314 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6315 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6316 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm28, %zmm0
6317 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm18
6318 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm26
6319 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm3
6320 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm2
6321 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
6322 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm24
6323 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm0
6324 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm14
6325 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm16, %zmm14
6326 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm23
6327 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
6328 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm11
6329 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm6[2,3,2,3],zmm11[2,3,2,3]
6330 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6331 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
6332 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm14
6333 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm2, %zmm19
6334 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm5, %zmm19
6335 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6336 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm14, %zmm25
6337 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm4, %zmm25
6338 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6339 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
6340 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm10, %zmm4
6341 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm9, %zmm4
6342 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6343 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6344 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm28, %zmm2
6345 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm11
6346 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [3,0,12,4,3,0,12,4]
6347 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
6348 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm29
6349 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm5, %zmm29
6350 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [13,5,13,5,13,5,13,5]
6351 ; AVX512F-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6352 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
6353 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm2
6354 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6355 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
6356 ; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6357 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
6358 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm2
6359 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6360 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm8
6361 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6362 ; AVX512F-ONLY-FAST-NEXT: movb $48, %r10b
6363 ; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k3
6364 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm31 = [0,8,0,1,0,8,0,1]
6365 ; AVX512F-ONLY-FAST-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3]
6366 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm25
6367 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm31, %zmm25
6368 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
6369 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
6370 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm21
6371 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm4, %zmm21
6372 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm7[0],zmm17[0],zmm7[2],zmm17[2],zmm7[4],zmm17[4],zmm7[6],zmm17[6]
6373 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
6374 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm1, %zmm9
6375 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [6,14,6,14]
6376 ; AVX512F-ONLY-FAST-NEXT: # ymm8 = mem[0,1,0,1]
6377 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2
6378 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm8, %zmm2
6379 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6380 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [15,7,15,7]
6381 ; AVX512F-ONLY-FAST-NEXT: # ymm2 = mem[0,1,0,1]
6382 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm2, %zmm7
6383 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
6384 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6385 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm17
6386 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm5, %zmm17
6387 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
6388 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm2
6389 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6390 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
6391 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm2
6392 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6393 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm13
6394 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6395 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm16
6396 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm31, %zmm16
6397 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm20
6398 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm20
6399 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm17 {%k3} = zmm30[0],zmm15[0],zmm30[2],zmm15[2],zmm30[4],zmm15[4],zmm30[6],zmm15[6]
6400 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm7
6401 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm1, %zmm7
6402 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm2
6403 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm8, %zmm2
6404 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6405 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm30
6406 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm13
6407 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm15
6408 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm2
6409 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm6
6410 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm6
6411 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6412 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm12
6413 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm5, %zmm12
6414 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm6
6415 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
6416 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6417 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm15, %zmm23
6418 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6419 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm28, %zmm15
6420 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm24, %zmm0, %zmm5
6421 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm24, %zmm10
6422 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm27
6423 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm28, %zmm24
6424 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm27
6425 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm19
6426 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm0
6427 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm23
6428 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm23
6429 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm6
6430 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
6431 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm28
6432 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm28
6433 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
6434 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm4, %zmm2
6435 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k3} = zmm26[0],zmm3[0],zmm26[2],zmm3[2],zmm26[4],zmm3[4],zmm26[6],zmm3[6]
6436 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm26, %zmm1
6437 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm26, %zmm8
6438 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm26
6439 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 {%k3} = zmm19[0],zmm0[0],zmm19[2],zmm0[2],zmm19[4],zmm0[4],zmm19[6],zmm0[6]
6440 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm31
6441 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm19, %zmm0, %zmm4
6442 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
6443 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm3
6444 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6445 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm19
6446 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
6447 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
6448 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6449 ; AVX512F-ONLY-FAST-NEXT: movb $12, %sil
6450 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
6451 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k3}
6452 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
6453 ; AVX512F-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6454 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
6455 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
6456 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm3, %zmm3
6457 ; AVX512F-ONLY-FAST-NEXT: movb $112, %sil
6458 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k4
6459 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm25 {%k4}
6460 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm3
6461 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
6462 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
6463 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm16 {%k3}
6464 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
6465 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm3
6466 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm3, %zmm3
6467 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16 {%k4}
6468 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm3
6469 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
6470 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
6471 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm28 {%k3}
6472 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
6473 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm0, %zmm3
6474 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 128(%rax), %zmm3, %zmm3
6475 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm28 {%k4}
6476 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm3
6477 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
6478 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
6479 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm31 {%k3}
6480 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
6481 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm14
6482 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm22, %zmm0
6483 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
6484 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
6485 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6486 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
6487 ; AVX512F-ONLY-FAST-NEXT: movb $14, %sil
6488 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
6489 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k3}
6490 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6491 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
6492 ; AVX512F-ONLY-FAST-NEXT: movb $120, %sil
6493 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k4
6494 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6495 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k4}
6496 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6497 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6498 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
6499 ; AVX512F-ONLY-FAST-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
6500 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6501 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
6502 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k3}
6503 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6504 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
6505 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
6506 ; AVX512F-ONLY-FAST-NEXT: movb $-61, %sil
6507 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k5
6508 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6509 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k5}
6510 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6511 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
6512 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
6513 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm3 {%k4}
6514 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm1 {%k1}
6515 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
6516 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm27 {%k4}
6517 ; AVX512F-ONLY-FAST-NEXT: movb $24, %sil
6518 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
6519 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
6520 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6521 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k3}
6522 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6523 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 64-byte Folded Reload
6524 ; AVX512F-ONLY-FAST-NEXT: # zmm1 = zmm0[0,1,2,3],mem[4,5,6,7]
6525 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6526 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k5}
6527 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm8[0,1,2,3],zmm10[4,5,6,7]
6528 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6529 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k5}
6530 ; AVX512F-ONLY-FAST-NEXT: movb $-31, %sil
6531 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k4
6532 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6533 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k4}
6534 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6535 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6536 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm30 {%k3}
6537 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm30 {%k4}
6538 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm26 {%k3}
6539 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm26 {%k4}
6540 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
6541 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6542 ; AVX512F-ONLY-FAST-NEXT: movb $6, %sil
6543 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k4
6544 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k4}
6545 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
6546 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6547 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k4}
6548 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
6549 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6550 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k4}
6551 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
6552 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
6553 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k4}
6554 ; AVX512F-ONLY-FAST-NEXT: movb $56, %cl
6555 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k4
6556 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6557 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k4}
6558 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6559 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm20 {%k4}
6560 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6561 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k4}
6562 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6563 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm22, %zmm8
6564 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm0
6565 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
6566 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm8
6567 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm4 {%k4}
6568 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6569 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm6 {%k1}
6570 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
6571 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6572 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm11 {%k1}
6573 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm8 # 64-byte Folded Reload
6574 ; AVX512F-ONLY-FAST-NEXT: # zmm8 = zmm23[0,1,2,3],mem[4,5,6,7]
6575 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <0,11,u,u,4,5,6,7>
6576 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm12, %zmm9
6577 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,1,11,u,4,5,6,7>
6578 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm9, %zmm10
6579 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%r8), %ymm9
6580 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm9 = ymm9[0],mem[0],ymm9[2],mem[2]
6581 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm11 {%k2} = zmm9[2,3,2,3],zmm0[2,3,2,3]
6582 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm12
6583 ; AVX512F-ONLY-FAST-NEXT: movb $64, %al
6584 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
6585 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm8 {%k1}
6586 ; AVX512F-ONLY-FAST-NEXT: movb $8, %al
6587 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
6588 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
6589 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm19 {%k3}
6590 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <12,u,u,3,4,5,6,13>
6591 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm6, %zmm9
6592 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,1,2,3,4,15,u,u>
6593 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm19, %zmm6
6594 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,12,u,3,4,5,6,7>
6595 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm9, %zmm11
6596 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <13,u,2,3,4,5,6,14>
6597 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm8, %zmm9
6598 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,1,2,3,4,5,15,u>
6599 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm6, %zmm8
6600 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,1,12,3,4,5,6,7]
6601 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm11, %zmm6
6602 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,13,2,3,4,5,6,7]
6603 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm9, %zmm11
6604 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [14,1,2,3,4,5,6,15]
6605 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm8, %zmm9
6606 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6607 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 1472(%rax)
6608 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1408(%rax)
6609 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1280(%rax)
6610 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
6611 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 1152(%rax)
6612 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 1088(%rax)
6613 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6614 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
6615 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 960(%rax)
6616 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 832(%rax)
6617 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 768(%rax)
6618 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 704(%rax)
6619 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 640(%rax)
6620 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6621 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 576(%rax)
6622 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 512(%rax)
6623 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6624 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
6625 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 320(%rax)
6626 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6627 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
6628 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm29, 192(%rax)
6629 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6630 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
6631 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 64(%rax)
6632 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 1344(%rax)
6633 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 896(%rax)
6634 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 448(%rax)
6635 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, (%rax)
6636 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
6637 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 1664(%rax)
6638 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 1600(%rax)
6639 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1536(%rax)
6640 ; AVX512F-ONLY-FAST-NEXT: addq $2024, %rsp # imm = 0x7E8
6641 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
6642 ; AVX512F-ONLY-FAST-NEXT: retq
6644 ; AVX512DQ-SLOW-LABEL: store_i64_stride7_vf32:
6645 ; AVX512DQ-SLOW: # %bb.0:
6646 ; AVX512DQ-SLOW-NEXT: subq $2120, %rsp # imm = 0x848
6647 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6648 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm10
6649 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm8
6650 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm15
6651 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
6652 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm20
6653 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm21
6654 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm5
6655 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm28
6656 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
6657 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm2
6658 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rax), %zmm4
6659 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rax), %zmm7
6660 ; AVX512DQ-SLOW-NEXT: movb $96, %r10b
6661 ; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k1
6662 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6663 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
6664 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6665 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1
6666 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm1
6667 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14
6668 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
6669 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6670 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm0, %zmm1
6671 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6672 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16
6673 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,3,11,3,11,3,11,3]
6674 ; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6675 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
6676 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm1, %zmm0
6677 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18
6678 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [2,10,0,3,2,10,0,3]
6679 ; AVX512DQ-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
6680 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
6681 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm1
6682 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm17
6683 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
6684 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm9
6685 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm9, (%rsp) # 32-byte Spill
6686 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %ymm11
6687 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6688 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm0
6689 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6690 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %ymm12
6691 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6692 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
6693 ; AVX512DQ-SLOW-NEXT: movb $28, %r10b
6694 ; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k2
6695 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k2} = zmm0[2,3,2,3],zmm4[2,3,2,3]
6696 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6697 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,0,5,4,12,0,5]
6698 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6699 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1
6700 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm1
6701 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm9
6702 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,1,12,7,0,1,12,7]
6703 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6704 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm0, %zmm1
6705 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6706 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [5,0,14,6,5,0,14,6]
6707 ; AVX512DQ-SLOW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
6708 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm19
6709 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6710 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm19
6711 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,13,6,7,0,13,6,7]
6712 ; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
6713 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm19
6714 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6715 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm19
6716 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [15,7,15,7,15,7,15,7]
6717 ; AVX512DQ-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6718 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm3
6719 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [6,13,14,7,6,13,14,7]
6720 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
6721 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm19
6722 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6723 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm3
6724 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6725 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm18, %zmm3
6726 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm19
6727 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm17, %zmm19
6728 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm23
6729 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
6730 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm12[0],ymm11[0],ymm12[2],ymm11[2]
6731 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 {%k2} = zmm3[2,3,2,3],zmm7[2,3,2,3]
6732 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6733 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
6734 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm12
6735 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
6736 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6737 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm11
6738 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6739 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm16, %zmm11
6740 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6741 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
6742 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm11
6743 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm0, %zmm11
6744 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6745 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm11
6746 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6747 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm11
6748 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm11
6749 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6750 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6751 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm3
6752 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm7
6753 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6754 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm24
6755 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm31
6756 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, %zmm3
6757 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm31, %zmm18, %zmm3
6758 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm17
6759 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm30
6760 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm7
6761 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm23, %zmm7
6762 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm7 {%k1}
6763 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rax), %zmm29
6764 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%r9), %ymm11
6765 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6766 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%r8), %ymm3
6767 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6768 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm3[0],ymm11[0],ymm3[2],ymm11[2]
6769 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 {%k2} = zmm3[2,3,2,3],zmm29[2,3,2,3]
6770 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6771 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
6772 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r9), %zmm7
6773 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm7, %zmm3, %zmm9
6774 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm9
6775 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6776 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm3, %zmm7, %zmm22
6777 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm22
6778 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6779 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
6780 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm14, %zmm0
6781 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6782 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
6783 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6784 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6785 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm4, %zmm3
6786 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm29
6787 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [13,5,13,5,13,5,13,5]
6788 ; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6789 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
6790 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
6791 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6792 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [6,14,6,14,6,14,6,14]
6793 ; AVX512DQ-SLOW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6794 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
6795 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm25, %zmm0
6796 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6797 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
6798 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm4, %zmm0
6799 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6800 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
6801 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
6802 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm5
6803 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm26
6804 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm3, %zmm26
6805 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm1
6806 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm25, %zmm1
6807 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6808 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, %zmm1
6809 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
6810 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6811 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm28
6812 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
6813 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm2
6814 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm1
6815 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm25, %zmm1
6816 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6817 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm11
6818 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
6819 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm30, %zmm17, %zmm25
6820 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
6821 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm22
6822 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm0, %zmm30
6823 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
6824 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm0, %zmm16
6825 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm23
6826 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6827 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
6828 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm12
6829 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm0
6830 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6831 ; AVX512DQ-SLOW-NEXT: movb $48, %r10b
6832 ; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k3
6833 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
6834 ; AVX512DQ-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
6835 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, %zmm20
6836 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm4, %zmm20
6837 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k3} = zmm10[0],zmm15[0],zmm10[2],zmm15[2],zmm10[4],zmm15[4],zmm10[6],zmm15[6]
6838 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6839 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm18
6840 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm7
6841 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm21
6842 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm14
6843 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
6844 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
6845 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm2, %zmm14
6846 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm3, %zmm18
6847 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
6848 ; AVX512DQ-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
6849 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm1, %zmm7
6850 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [15,7,15,7]
6851 ; AVX512DQ-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
6852 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm15, %zmm6, %zmm21
6853 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
6854 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm4, %zmm15
6855 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm28 {%k3} = zmm8[0],zmm13[0],zmm8[2],zmm13[2],zmm8[4],zmm13[4],zmm8[6],zmm13[6]
6856 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm23
6857 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm5
6858 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
6859 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
6860 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm10
6861 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm23
6862 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm5
6863 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm6, %zmm17
6864 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm19
6865 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm0
6866 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm27
6867 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm27
6868 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, %zmm13
6869 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
6870 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm8
6871 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm8
6872 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm31, %zmm24, %zmm3
6873 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm31, %zmm24, %zmm1
6874 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm30 {%k3} = zmm24[0],zmm31[0],zmm24[2],zmm31[2],zmm24[4],zmm31[4],zmm24[6],zmm31[6]
6875 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, %zmm9
6876 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm31, %zmm2, %zmm24
6877 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm31, %zmm6, %zmm9
6878 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm19[0],zmm0[0],zmm19[2],zmm0[2],zmm19[4],zmm0[4],zmm19[6],zmm0[6]
6879 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm0, %zmm19, %zmm2
6880 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm19, %zmm0, %zmm4
6881 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
6882 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm0, %zmm19, %zmm31
6883 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6884 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm19
6885 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6886 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
6887 ; AVX512DQ-SLOW-NEXT: movb $120, %sil
6888 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
6889 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6890 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k3}
6891 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6892 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm18 # 64-byte Folded Reload
6893 ; AVX512DQ-SLOW-NEXT: # zmm18 = zmm7[0,1,2,3],mem[4,5,6,7]
6894 ; AVX512DQ-SLOW-NEXT: movb $-61, %sil
6895 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
6896 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6897 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k4}
6898 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, %zmm23 {%k1}
6899 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
6900 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm26 {%k3}
6901 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm3 {%k1}
6902 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6903 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0 {%k3}
6904 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6905 ; AVX512DQ-SLOW-NEXT: movb $24, %sil
6906 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
6907 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6908 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k3}
6909 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm3 # 64-byte Folded Reload
6910 ; AVX512DQ-SLOW-NEXT: # zmm3 = zmm5[0,1,2,3],mem[4,5,6,7]
6911 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
6912 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
6913 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm25[4,5,6,7]
6914 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6915 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k4}
6916 ; AVX512DQ-SLOW-NEXT: movb $-31, %sil
6917 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
6918 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6919 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm21 {%k4}
6920 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6921 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm17 {%k3}
6922 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6923 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm17 {%k4}
6924 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm9 {%k3}
6925 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9 {%k4}
6926 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm1
6927 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
6928 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
6929 ; AVX512DQ-SLOW-NEXT: movb $12, %sil
6930 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
6931 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm14 {%k4}
6932 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [0,8,0,8,0,8,0,8]
6933 ; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
6934 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
6935 ; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm6 # 64-byte Folded Reload
6936 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm5
6937 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
6938 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
6939 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k4}
6940 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdx), %xmm5
6941 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
6942 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
6943 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm24 {%k4}
6944 ; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdx), %xmm5
6945 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
6946 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
6947 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm2 {%k4}
6948 ; AVX512DQ-SLOW-NEXT: movb $112, %sil
6949 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
6950 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm6, %zmm14 {%k4}
6951 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
6952 ; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
6953 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm5, %zmm10 {%k4}
6954 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
6955 ; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
6956 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 128(%rax), %zmm5, %zmm24 {%k4}
6957 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %zmm5
6958 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r9), %zmm11
6959 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm5, %zmm1
6960 ; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 192(%rax), %zmm1, %zmm2 {%k4}
6961 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
6962 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
6963 ; AVX512DQ-SLOW-NEXT: movb $6, %sil
6964 ; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
6965 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm20 {%k4}
6966 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
6967 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
6968 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm15 {%k4}
6969 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm1
6970 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
6971 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k4}
6972 ; AVX512DQ-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm1
6973 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
6974 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
6975 ; AVX512DQ-SLOW-NEXT: movb $56, %cl
6976 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k4
6977 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6978 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm20 {%k4}
6979 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6980 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm15 {%k4}
6981 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
6982 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm8 {%k4}
6983 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
6984 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm5, %zmm6
6985 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rax), %zmm1
6986 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
6987 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
6988 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k4}
6989 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
6990 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k1}
6991 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
6992 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
6993 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm23 {%k1}
6994 ; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm7 # 64-byte Folded Reload
6995 ; AVX512DQ-SLOW-NEXT: # zmm7 = zmm27[0,1,2,3],mem[4,5,6,7]
6996 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,11,u,u,4,5,6,7>
6997 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm16, %zmm22
6998 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,11,u,4,5,6,7>
6999 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm22, %zmm6
7000 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %ymm16
7001 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm16 = ymm16[0],mem[0],ymm16[2],mem[2]
7002 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 {%k2} = zmm16[2,3,2,3],zmm1[2,3,2,3]
7003 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm19 {%k3}
7004 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7005 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq (%rsp), %ymm12, %ymm12 # 32-byte Folded Reload
7006 ; AVX512DQ-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7007 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7008 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7009 ; AVX512DQ-SLOW-NEXT: movb $14, %cl
7010 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
7011 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7012 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm16 {%k1}
7013 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7014 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
7015 ; AVX512DQ-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7016 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7017 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7018 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm28 {%k1}
7019 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7020 ; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
7021 ; AVX512DQ-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7022 ; AVX512DQ-SLOW-NEXT: movb $64, %cl
7023 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k2
7024 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k2}
7025 ; AVX512DQ-SLOW-NEXT: movb $8, %cl
7026 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k2
7027 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k2}
7028 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7029 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7030 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm30 {%k1}
7031 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
7032 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm13, %zmm12
7033 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,1,2,3,4,15,u,u>
7034 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm5, %zmm19, %zmm13
7035 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,12,u,3,4,5,6,7>
7036 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm12, %zmm5
7037 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
7038 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm12
7039 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,1,2,3,4,5,15,u>
7040 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm11, %zmm13, %zmm7
7041 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,1,12,3,4,5,6,7]
7042 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm5, %zmm11
7043 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,13,2,3,4,5,6,7]
7044 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm12, %zmm5
7045 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,1,2,3,4,5,6,15]
7046 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm7, %zmm12
7047 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7048 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 1472(%rax)
7049 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 1408(%rax)
7050 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 1344(%rax)
7051 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, 1280(%rax)
7052 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 1216(%rax)
7053 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7054 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
7055 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
7056 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7057 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
7058 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 960(%rax)
7059 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, 896(%rax)
7060 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 832(%rax)
7061 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 768(%rax)
7062 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, 704(%rax)
7063 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, 640(%rax)
7064 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7065 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
7066 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 512(%rax)
7067 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 448(%rax)
7068 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, 384(%rax)
7069 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, 320(%rax)
7070 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7071 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
7072 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
7073 ; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7074 ; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
7075 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, 64(%rax)
7076 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, (%rax)
7077 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 1728(%rax)
7078 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 1664(%rax)
7079 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 1600(%rax)
7080 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 1536(%rax)
7081 ; AVX512DQ-SLOW-NEXT: addq $2120, %rsp # imm = 0x848
7082 ; AVX512DQ-SLOW-NEXT: vzeroupper
7083 ; AVX512DQ-SLOW-NEXT: retq
7085 ; AVX512DQ-FAST-LABEL: store_i64_stride7_vf32:
7086 ; AVX512DQ-FAST: # %bb.0:
7087 ; AVX512DQ-FAST-NEXT: subq $2056, %rsp # imm = 0x808
7088 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7089 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm10
7090 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm15
7091 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm16
7092 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm18
7093 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm21
7094 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm9
7095 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm20
7096 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm17
7097 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm0
7098 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm6
7099 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm8
7100 ; AVX512DQ-FAST-NEXT: movb $96, %r10b
7101 ; AVX512DQ-FAST-NEXT: kmovw %r10d, %k1
7102 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7103 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [9,1,9,1,9,1,9,1]
7104 ; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7105 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
7106 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm2
7107 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm11
7108 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [4,9,0,3,4,9,0,3]
7109 ; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7110 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm2
7111 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7112 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm12
7113 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [11,3,11,3,11,3,11,3]
7114 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7115 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
7116 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm1
7117 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm14
7118 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [2,10,0,3,2,10,0,3]
7119 ; AVX512DQ-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7120 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm2
7121 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm3, %zmm2
7122 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm19
7123 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
7124 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
7125 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm1
7126 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %ymm5
7127 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%r9), %ymm3
7128 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm7
7129 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %ymm23
7130 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %ymm22
7131 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
7132 ; AVX512DQ-FAST-NEXT: movb $28, %r10b
7133 ; AVX512DQ-FAST-NEXT: kmovw %r10d, %k2
7134 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm8[2,3,2,3]
7135 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7136 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [1,3,7,7]
7137 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm1, %ymm4, %ymm7
7138 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7139 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
7140 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7141 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
7142 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm2, %zmm1
7143 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm13
7144 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,1,12,7,0,1,12,7]
7145 ; AVX512DQ-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
7146 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm7, %zmm1
7147 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7148 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [5,0,14,6,5,0,14,6]
7149 ; AVX512DQ-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
7150 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm1
7151 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7152 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm24, %zmm1
7153 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,13,6,7,0,13,6,7]
7154 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7155 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
7156 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7157 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm26 = [15,7,15,7,15,7,15,7]
7158 ; AVX512DQ-FAST-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7159 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm26, %zmm0
7160 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [6,13,14,7,6,13,14,7]
7161 ; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7162 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
7163 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7164 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm23[0],ymm5[0],ymm23[2],ymm5[2]
7165 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm5, %ymm4, %ymm23
7166 ; AVX512DQ-FAST-NEXT: vmovdqu64 %ymm23, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7167 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm22[0],ymm3[0],ymm22[2],ymm3[2]
7168 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm3, %ymm4, %ymm22
7169 ; AVX512DQ-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7170 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm23
7171 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm3
7172 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm5
7173 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7174 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm14, %zmm3
7175 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm4
7176 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm19, %zmm4
7177 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
7178 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rax), %zmm14
7179 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm0[2,3,2,3],zmm14[2,3,2,3]
7180 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7181 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm0
7182 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm22
7183 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7184 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm4
7185 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7186 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm4
7187 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
7188 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7189 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm12, %zmm4
7190 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7191 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm6
7192 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm4
7193 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm6
7194 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm7, %zmm6
7195 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7196 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm6
7197 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm24, %zmm6
7198 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm6
7199 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7200 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7201 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm26, %zmm0
7202 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm14
7203 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdi), %zmm27
7204 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rsi), %zmm6
7205 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
7206 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm5, %zmm0
7207 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdx), %zmm13
7208 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rcx), %zmm5
7209 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
7210 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm22
7211 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm28
7212 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
7213 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rax), %zmm12
7214 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm22 {%k2} = zmm8[2,3,2,3],zmm12[2,3,2,3]
7215 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7216 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %zmm0
7217 ; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r9), %zmm8
7218 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm0, %zmm4
7219 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm7, %zmm4
7220 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7221 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm8, %zmm24
7222 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm24
7223 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7224 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
7225 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm11, %zmm2
7226 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7227 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm3, %zmm2
7228 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7229 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7230 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm26, %zmm0
7231 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm12
7232 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [3,0,12,4,3,0,12,4]
7233 ; AVX512DQ-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
7234 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm1
7235 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
7236 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
7237 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm3
7238 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [13,5,13,5,13,5,13,5]
7239 ; AVX512DQ-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7240 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm2
7241 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm2
7242 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7243 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [6,14,6,14,6,14,6,14]
7244 ; AVX512DQ-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7245 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
7246 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm2
7247 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7248 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm26, %zmm0
7249 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7250 ; AVX512DQ-FAST-NEXT: movb $48, %r10b
7251 ; AVX512DQ-FAST-NEXT: kmovw %r10d, %k3
7252 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
7253 ; AVX512DQ-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
7254 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm29
7255 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm11, %zmm29
7256 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,0,10,2,1,0,10,2]
7257 ; AVX512DQ-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
7258 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm31
7259 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm31
7260 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm10[0],zmm16[0],zmm10[2],zmm16[2],zmm10[4],zmm16[4],zmm10[6],zmm16[6]
7261 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
7262 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm7
7263 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm7
7264 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,14,6,14]
7265 ; AVX512DQ-FAST-NEXT: # ymm4 = mem[0,1,0,1]
7266 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm24
7267 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm4, %zmm24
7268 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [15,7,15,7]
7269 ; AVX512DQ-FAST-NEXT: # ymm2 = mem[0,1,0,1]
7270 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm10
7271 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7272 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm22
7273 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm15, %zmm22
7274 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm25
7275 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm1, %zmm25
7276 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm30
7277 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm30
7278 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm17, %zmm26, %zmm21
7279 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7280 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
7281 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm19
7282 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm11, %zmm19
7283 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm17
7284 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm8, %zmm17
7285 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm22 {%k3} = zmm23[0],zmm18[0],zmm23[2],zmm18[2],zmm23[4],zmm18[4],zmm23[6],zmm18[6]
7286 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
7287 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm1, %zmm3
7288 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm4, %zmm23
7289 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm2, %zmm0
7290 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7291 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdx), %zmm18
7292 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rcx), %zmm0
7293 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm10
7294 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
7295 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7296 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm16
7297 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm18, %zmm15, %zmm16
7298 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm10
7299 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm10
7300 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7301 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm18, %zmm28
7302 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7303 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm26, %zmm18
7304 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm13, %zmm5, %zmm15
7305 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm13, %zmm9
7306 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm28
7307 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm13
7308 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm28
7309 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdi), %zmm20
7310 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rsi), %zmm0
7311 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm21
7312 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm21
7313 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm10
7314 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm10
7315 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm26
7316 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm11, %zmm26
7317 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm5
7318 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm8, %zmm5
7319 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm15 {%k3} = zmm27[0],zmm6[0],zmm27[2],zmm6[2],zmm27[4],zmm6[4],zmm27[6],zmm6[6]
7320 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm27, %zmm1
7321 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm27, %zmm4
7322 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm2, %zmm27
7323 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm20[0],zmm0[0],zmm20[2],zmm0[2],zmm20[4],zmm0[4],zmm20[6],zmm0[6]
7324 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm20, %zmm11
7325 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm20, %zmm0, %zmm8
7326 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7327 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm20, %zmm6
7328 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7329 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm2, %zmm20
7330 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7331 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7332 ; AVX512DQ-FAST-NEXT: movb $14, %sil
7333 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
7334 ; AVX512DQ-FAST-NEXT: vmovdqu64 (%rsp), %zmm2 # 64-byte Reload
7335 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
7336 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
7337 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7338 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
7339 ; AVX512DQ-FAST-NEXT: movb $120, %sil
7340 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
7341 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7342 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm0 {%k4}
7343 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7344 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm24, %zmm2 # 64-byte Folded Reload
7345 ; AVX512DQ-FAST-NEXT: # zmm2 = zmm24[0,1,2,3],mem[4,5,6,7]
7346 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7347 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7348 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k3}
7349 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7350 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
7351 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k3}
7352 ; AVX512DQ-FAST-NEXT: movb $-61, %sil
7353 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k5
7354 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7355 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k5}
7356 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7357 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, %zmm3 {%k1}
7358 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7359 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm7 {%k4}
7360 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm1 {%k1}
7361 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
7362 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm24 {%k4}
7363 ; AVX512DQ-FAST-NEXT: movb $24, %sil
7364 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
7365 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7366 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7367 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm6 {%k3}
7368 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm23[0,1,2,3],zmm30[4,5,6,7]
7369 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7370 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k5}
7371 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm4[0,1,2,3],zmm9[4,5,6,7]
7372 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7373 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k5}
7374 ; AVX512DQ-FAST-NEXT: movb $-31, %sil
7375 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
7376 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7377 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm6 {%k4}
7378 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
7379 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
7380 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7381 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k3}
7382 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, %zmm28 {%k4}
7383 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm27 {%k3}
7384 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm27 {%k4}
7385 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm0
7386 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
7387 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7388 ; AVX512DQ-FAST-NEXT: movb $12, %sil
7389 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
7390 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k4}
7391 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
7392 ; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7393 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7394 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
7395 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm3
7396 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
7397 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
7398 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm19 {%k4}
7399 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdx), %xmm3
7400 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
7401 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
7402 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm26 {%k4}
7403 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdx), %xmm3
7404 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
7405 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
7406 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm11 {%k4}
7407 ; AVX512DQ-FAST-NEXT: movb $112, %sil
7408 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
7409 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm6, %zmm29 {%k4}
7410 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7411 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
7412 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm3, %zmm19 {%k4}
7413 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
7414 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
7415 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 128(%rax), %zmm3, %zmm26 {%k4}
7416 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
7417 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %zmm6
7418 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm3, %zmm0
7419 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 192(%rax), %zmm0, %zmm11 {%k4}
7420 ; AVX512DQ-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
7421 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7422 ; AVX512DQ-FAST-NEXT: movb $6, %sil
7423 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
7424 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm31 {%k4}
7425 ; AVX512DQ-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
7426 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7427 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k4}
7428 ; AVX512DQ-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
7429 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7430 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k4}
7431 ; AVX512DQ-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
7432 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7433 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k4}
7434 ; AVX512DQ-FAST-NEXT: movb $56, %cl
7435 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k4
7436 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7437 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
7438 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7439 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm17 {%k4}
7440 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7441 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
7442 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7443 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm3, %zmm9
7444 ; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rax), %zmm0
7445 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
7446 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm9
7447 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm8 {%k4}
7448 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7449 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
7450 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
7451 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7452 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, %zmm14 {%k1}
7453 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm21, %zmm9 # 64-byte Folded Reload
7454 ; AVX512DQ-FAST-NEXT: # zmm9 = zmm21[0,1,2,3],mem[4,5,6,7]
7455 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,11,u,u,4,5,6,7>
7456 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm16, %zmm12
7457 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = <0,1,11,u,4,5,6,7>
7458 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm12, %zmm13
7459 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%r8), %ymm12
7460 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm12[0],mem[0],ymm12[2],mem[2]
7461 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm12[2,3,2,3],zmm0[2,3,2,3]
7462 ; AVX512DQ-FAST-NEXT: movb $64, %al
7463 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
7464 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
7465 ; AVX512DQ-FAST-NEXT: movb $8, %al
7466 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
7467 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
7468 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, %zmm20 {%k3}
7469 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
7470 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm10, %zmm12
7471 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,1,2,3,4,15,u,u>
7472 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm20, %zmm10
7473 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,12,u,3,4,5,6,7>
7474 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm12, %zmm3
7475 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
7476 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm9, %zmm12
7477 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,1,2,3,4,5,15,u>
7478 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm6, %zmm10, %zmm9
7479 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,1,12,3,4,5,6,7]
7480 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm6
7481 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,13,2,3,4,5,6,7]
7482 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm3
7483 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [14,1,2,3,4,5,6,15]
7484 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm9, %zmm10
7485 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7486 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 1472(%rax)
7487 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, 1408(%rax)
7488 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, 1344(%rax)
7489 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, 1280(%rax)
7490 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 1216(%rax)
7491 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, 1152(%rax)
7492 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, 1088(%rax)
7493 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7494 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
7495 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 960(%rax)
7496 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, 896(%rax)
7497 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, 832(%rax)
7498 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 768(%rax)
7499 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 704(%rax)
7500 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, 640(%rax)
7501 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7502 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 576(%rax)
7503 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 512(%rax)
7504 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 448(%rax)
7505 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 384(%rax)
7506 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7507 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 320(%rax)
7508 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7509 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 256(%rax)
7510 ; AVX512DQ-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
7511 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 192(%rax)
7512 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7513 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 128(%rax)
7514 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, 64(%rax)
7515 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, (%rax)
7516 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 1728(%rax)
7517 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 1664(%rax)
7518 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 1600(%rax)
7519 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 1536(%rax)
7520 ; AVX512DQ-FAST-NEXT: addq $2056, %rsp # imm = 0x808
7521 ; AVX512DQ-FAST-NEXT: vzeroupper
7522 ; AVX512DQ-FAST-NEXT: retq
7523 ;
7524 ; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride7_vf32:
7525 ; AVX512BW-ONLY-SLOW: # %bb.0:
7526 ; AVX512BW-ONLY-SLOW-NEXT: subq $2120, %rsp # imm = 0x848
7527 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7528 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm8
7529 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm11
7530 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm27
7531 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
7532 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm18
7533 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm21
7534 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm7
7535 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm25
7536 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm0
7537 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm4
7538 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm1
7539 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm5
7540 ; AVX512BW-ONLY-SLOW-NEXT: movb $96, %r10b
7541 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k1
7542 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7543 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [9,1,9,1,9,1,9,1]
7544 ; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7545 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
7546 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm6
7547 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14
7548 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [4,9,0,3,4,9,0,3]
7549 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7550 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
7551 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm6
7552 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7553 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm15
7554 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [11,3,11,3,11,3,11,3]
7555 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7556 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm1
7557 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm1
7558 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm17
7559 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [2,10,0,3,2,10,0,3]
7560 ; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
7561 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm3
7562 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm6, %zmm3
7563 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm16
7564 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
7565 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm10
7566 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm10, (%rsp) # 32-byte Spill
7567 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm6
7568 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7569 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm1
7570 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7571 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm12
7572 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7573 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm10[0],ymm1[2],ymm10[2]
7574 ; AVX512BW-ONLY-SLOW-NEXT: movb $28, %r10b
7575 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k2
7576 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm1[2,3,2,3],zmm2[2,3,2,3]
7577 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7578 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [4,12,0,5,4,12,0,5]
7579 ; AVX512BW-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7580 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
7581 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm3
7582 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm10
7583 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,1,12,7,0,1,12,7]
7584 ; AVX512BW-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
7585 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm1, %zmm3
7586 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7587 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [5,0,14,6,5,0,14,6]
7588 ; AVX512BW-ONLY-SLOW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
7589 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm19
7590 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm20
7591 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7592 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm22, %zmm19
7593 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,13,6,7,0,13,6,7]
7594 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
7595 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm19
7596 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7597 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm19
7598 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [15,7,15,7,15,7,15,7]
7599 ; AVX512BW-ONLY-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7600 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm4, %zmm0
7601 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [6,13,14,7,6,13,14,7]
7602 ; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7603 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm19
7604 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7605 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm0
7606 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7607 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm17, %zmm0
7608 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm19
7609 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm16, %zmm19
7610 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
7611 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm12[0],ymm6[0],ymm12[2],ymm6[2]
7612 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 {%k2} = zmm0[2,3,2,3],zmm5[2,3,2,3]
7613 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7614 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm0
7615 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm12
7616 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
7617 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7618 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm6
7619 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7620 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm15, %zmm6
7621 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7622 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
7623 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm10, %zmm6
7624 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm1, %zmm6
7625 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7626 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm6
7627 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7628 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm22, %zmm6
7629 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm6
7630 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7631 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7632 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm0
7633 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm5
7634 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7635 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm6
7636 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm19
7637 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0
7638 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm17, %zmm0
7639 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm5
7640 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm28
7641 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm12
7642 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm16, %zmm12
7643 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm17
7644 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
7645 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm29
7646 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %ymm16
7647 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %ymm16, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7648 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%r8), %ymm0
7649 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7650 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm16[0],ymm0[2],ymm16[2]
7651 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm12 {%k2} = zmm0[2,3,2,3],zmm29[2,3,2,3]
7652 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7653 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm0
7654 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm20
7655 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm0, %zmm10
7656 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm10
7657 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7658 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm20, %zmm22
7659 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm3, %zmm22
7660 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7661 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1
7662 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm14, %zmm1
7663 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm15, %zmm1
7664 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7665 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7666 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm4, %zmm0
7667 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm29
7668 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [13,5,13,5,13,5,13,5]
7669 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7670 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
7671 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm3, %zmm0
7672 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7673 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm24 = [6,14,6,14,6,14,6,14]
7674 ; AVX512BW-ONLY-SLOW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7675 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
7676 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm24, %zmm0
7677 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7678 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
7679 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm4, %zmm0
7680 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7681 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
7682 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
7683 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm7
7684 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
7685 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm3, %zmm1
7686 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7687 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
7688 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm24, %zmm1
7689 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7690 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
7691 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm4, %zmm1
7692 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7693 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm25
7694 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm18
7695 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm1
7696 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm2
7697 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm24, %zmm2
7698 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7699 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm26
7700 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm3, %zmm26
7701 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm28, %zmm5, %zmm24
7702 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm30
7703 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm28, %zmm4, %zmm30
7704 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm5, %zmm0, %zmm28
7705 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm21
7706 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm21
7707 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm18, %zmm17
7708 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7709 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm0
7710 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm18
7711 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm0
7712 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7713 ; AVX512BW-ONLY-SLOW-NEXT: movb $48, %r10b
7714 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k3
7715 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
7716 ; AVX512BW-ONLY-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
7717 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm15
7718 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm4, %zmm15
7719 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k3} = zmm8[0],zmm27[0],zmm8[2],zmm27[2],zmm8[4],zmm27[4],zmm8[6],zmm27[6]
7720 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7721 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm7
7722 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm14
7723 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm16
7724 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
7725 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,8,0,1,0,8,0,1]
7726 ; AVX512BW-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
7727 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm5, %zmm17
7728 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm3, %zmm7
7729 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
7730 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
7731 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm14
7732 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [15,7,15,7]
7733 ; AVX512BW-ONLY-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
7734 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm10, %zmm16
7735 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm27
7736 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm4, %zmm27
7737 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm25 {%k3} = zmm11[0],zmm13[0],zmm11[2],zmm13[2],zmm11[4],zmm13[4],zmm11[6],zmm13[6]
7738 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm2
7739 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm12
7740 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm31
7741 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm11
7742 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm2
7743 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm12
7744 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm31
7745 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm22
7746 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm0
7747 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm23
7748 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm23
7749 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm13
7750 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
7751 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm8
7752 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm4, %zmm8
7753 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm6, %zmm3
7754 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm6, %zmm1
7755 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm28 {%k3} = zmm6[0],zmm19[0],zmm6[2],zmm19[2],zmm6[4],zmm19[4],zmm6[6],zmm19[6]
7756 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm9
7757 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm5, %zmm6
7758 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm19, %zmm10, %zmm9
7759 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm21 {%k3} = zmm22[0],zmm0[0],zmm22[2],zmm0[2],zmm22[4],zmm0[4],zmm22[6],zmm0[6]
7760 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm22, %zmm5
7761 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm22, %zmm0, %zmm4
7762 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7763 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm22, %zmm19
7764 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7765 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm22
7766 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm0
7767 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
7768 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7769 ; AVX512BW-ONLY-SLOW-NEXT: movb $12, %sil
7770 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
7771 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k3}
7772 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
7773 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7774 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7775 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm10 # 64-byte Folded Reload
7776 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm10, %zmm10
7777 ; AVX512BW-ONLY-SLOW-NEXT: movb $112, %sil
7778 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
7779 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm17 {%k4}
7780 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm10
7781 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
7782 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
7783 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm11 {%k3}
7784 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7785 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm10 # 64-byte Folded Reload
7786 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm10, %zmm10
7787 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm11 {%k4}
7788 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm10
7789 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
7790 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
7791 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm6 {%k3}
7792 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7793 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm10
7794 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 128(%rax), %zmm10, %zmm10
7795 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm6 {%k4}
7796 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm10
7797 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
7798 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
7799 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm10, %zmm0, %zmm5 {%k3}
7800 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm19
7801 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm20
7802 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm19, %zmm0
7803 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
7804 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
7805 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7806 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
7807 ; AVX512BW-ONLY-SLOW-NEXT: movb $120, %sil
7808 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
7809 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7810 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0 {%k3}
7811 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7812 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm14, %zmm7 # 64-byte Folded Reload
7813 ; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = zmm14[0,1,2,3],mem[4,5,6,7]
7814 ; AVX512BW-ONLY-SLOW-NEXT: movb $-61, %sil
7815 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
7816 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7817 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
7818 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7819 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
7820 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7821 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0 {%k3}
7822 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7823 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm3 {%k1}
7824 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
7825 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm26 {%k3}
7826 ; AVX512BW-ONLY-SLOW-NEXT: movb $24, %sil
7827 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
7828 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7829 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k3}
7830 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm3 # 64-byte Folded Reload
7831 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = zmm12[0,1,2,3],mem[4,5,6,7]
7832 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7833 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
7834 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm1[0,1,2,3],zmm24[4,5,6,7]
7835 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7836 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
7837 ; AVX512BW-ONLY-SLOW-NEXT: movb $-31, %sil
7838 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
7839 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7840 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k4}
7841 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7842 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k3}
7843 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7844 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
7845 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, %zmm9 {%k3}
7846 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9 {%k4}
7847 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm0
7848 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7849 ; AVX512BW-ONLY-SLOW-NEXT: movb $6, %sil
7850 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
7851 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k4}
7852 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
7853 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7854 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm27 {%k4}
7855 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm0
7856 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7857 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k4}
7858 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm0
7859 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
7860 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k4}
7861 ; AVX512BW-ONLY-SLOW-NEXT: movb $56, %cl
7862 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k4
7863 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7864 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15 {%k4}
7865 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7866 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k4}
7867 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7868 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k4}
7869 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7870 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm19, %zmm2
7871 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm0
7872 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7873 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm2
7874 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k4}
7875 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7876 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm13 {%k1}
7877 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
7878 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7879 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
7880 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm10 # 64-byte Folded Reload
7881 ; AVX512BW-ONLY-SLOW-NEXT: # zmm10 = zmm23[0,1,2,3],mem[4,5,6,7]
7882 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,11,u,u,4,5,6,7>
7883 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm21, %zmm12
7884 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,1,11,u,4,5,6,7>
7885 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm12, %zmm2
7886 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%r8), %ymm12
7887 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm12[0],mem[0],ymm12[2],mem[2]
7888 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm12[2,3,2,3],zmm0[2,3,2,3]
7889 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm21
7890 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, %zmm22 {%k3}
7891 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7892 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq (%rsp), %ymm12, %ymm12 # 32-byte Folded Reload
7893 ; AVX512BW-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7894 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7895 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7896 ; AVX512BW-ONLY-SLOW-NEXT: movb $14, %cl
7897 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
7898 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
7899 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm18 {%k1}
7900 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7901 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
7902 ; AVX512BW-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7903 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7904 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7905 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm25 {%k1}
7906 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7907 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
7908 ; AVX512BW-ONLY-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
7909 ; AVX512BW-ONLY-SLOW-NEXT: movb $64, %cl
7910 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
7911 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, %zmm10 {%k2}
7912 ; AVX512BW-ONLY-SLOW-NEXT: movb $8, %cl
7913 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
7914 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
7915 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
7916 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
7917 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm28 {%k1}
7918 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
7919 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm13, %zmm12
7920 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,1,2,3,4,15,u,u>
7921 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm19, %zmm22, %zmm13
7922 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,12,u,3,4,5,6,7>
7923 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm12, %zmm14
7924 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
7925 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm10, %zmm12
7926 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,1,2,3,4,5,15,u>
7927 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm13, %zmm10
7928 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,1,12,3,4,5,6,7]
7929 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm14, %zmm13
7930 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [0,13,2,3,4,5,6,7]
7931 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm12, %zmm14
7932 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,1,2,3,4,5,6,15]
7933 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm10, %zmm12
7934 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7935 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 1472(%rax)
7936 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 1408(%rax)
7937 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 1280(%rax)
7938 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1216(%rax)
7939 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 1152(%rax)
7940 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7941 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
7942 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 960(%rax)
7943 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 832(%rax)
7944 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 768(%rax)
7945 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7946 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 704(%rax)
7947 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7948 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
7949 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 512(%rax)
7950 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 384(%rax)
7951 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 320(%rax)
7952 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7953 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
7954 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7955 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
7956 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 64(%rax)
7957 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 1344(%rax)
7958 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
7959 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 896(%rax)
7960 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 640(%rax)
7961 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 448(%rax)
7962 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 192(%rax)
7963 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, (%rax)
7964 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 1728(%rax)
7965 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 1664(%rax)
7966 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 1600(%rax)
7967 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1536(%rax)
7968 ; AVX512BW-ONLY-SLOW-NEXT: addq $2120, %rsp # imm = 0x848
7969 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
7970 ; AVX512BW-ONLY-SLOW-NEXT: retq
7971 ;
7972 ; AVX512BW-ONLY-FAST-LABEL: store_i64_stride7_vf32:
7973 ; AVX512BW-ONLY-FAST: # %bb.0:
7974 ; AVX512BW-ONLY-FAST-NEXT: subq $2024, %rsp # imm = 0x7E8
7975 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7976 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm7
7977 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm30
7978 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm17
7979 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm15
7980 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm13
7981 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm8
7982 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm27
7983 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm20
7984 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm0
7985 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm9
7986 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm4
7987 ; AVX512BW-ONLY-FAST-NEXT: movb $96, %r10b
7988 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k1
7989 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7990 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [9,1,9,1,9,1,9,1]
7991 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
7992 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
7993 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm2, %zmm3
7994 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
7995 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,9,0,3,4,9,0,3]
7996 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
7997 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm2, %zmm3
7998 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7999 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm11
8000 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [11,3,11,3,11,3,11,3]
8001 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8002 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm1
8003 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm2, %zmm1
8004 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14
8005 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [2,10,0,3,2,10,0,3]
8006 ; AVX512BW-ONLY-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8007 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
8008 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm2
8009 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16
8010 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
8011 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm5
8012 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm1
8013 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm6
8014 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%r9), %ymm2
8015 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %ymm19
8016 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm23
8017 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %ymm22
8018 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm19[0],ymm1[0],ymm19[2],ymm1[2]
8019 ; AVX512BW-ONLY-FAST-NEXT: movb $28, %r10b
8020 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k2
8021 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 {%k2} = zmm3[2,3,2,3],zmm4[2,3,2,3]
8022 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8023 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm21
8024 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [1,3,7,7]
8025 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm1, %ymm3, %ymm19
8026 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm19, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8027 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [4,12,0,5,4,12,0,5]
8028 ; AVX512BW-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
8029 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
8030 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm4, %zmm1
8031 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm19
8032 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,1,12,7,0,1,12,7]
8033 ; AVX512BW-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
8034 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm5, %zmm1
8035 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8036 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm25 = [5,0,14,6,5,0,14,6]
8037 ; AVX512BW-ONLY-FAST-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3]
8038 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm1
8039 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8040 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm1
8041 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,13,6,7,0,13,6,7]
8042 ; AVX512BW-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
8043 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm4, %zmm1
8044 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8045 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm28 = [15,7,15,7,15,7,15,7]
8046 ; AVX512BW-ONLY-FAST-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8047 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm28, %zmm0
8048 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [6,13,14,7,6,13,14,7]
8049 ; AVX512BW-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8050 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm21
8051 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8052 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm23[0],ymm6[0],ymm23[2],ymm6[2]
8053 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm6, %ymm3, %ymm23
8054 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm23, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8055 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm6 = ymm22[0],ymm2[0],ymm22[2],ymm2[2]
8056 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm2, %ymm3, %ymm22
8057 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8058 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm2
8059 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8060 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm14, %zmm2
8061 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm3
8062 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm16, %zmm3
8063 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3 {%k1}
8064 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm18
8065 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm0[2,3,2,3],zmm18[2,3,2,3]
8066 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8067 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm0
8068 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm22
8069 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
8070 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8071 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm2
8072 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm9
8073 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8074 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm11, %zmm2
8075 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8076 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
8077 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm19, %zmm2
8078 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm5, %zmm2
8079 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8080 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm2
8081 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm2
8082 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm4, %zmm2
8083 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8084 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8085 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm28, %zmm0
8086 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm18
8087 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm26
8088 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm3
8089 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm2
8090 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
8091 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm24
8092 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm0
8093 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm14
8094 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm16, %zmm14
8095 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, %zmm23
8096 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
8097 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm11
8098 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm6[2,3,2,3],zmm11[2,3,2,3]
8099 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8100 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
8101 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm14
8102 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm2, %zmm19
8103 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm5, %zmm19
8104 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8105 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm14, %zmm25
8106 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm4, %zmm25
8107 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8108 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
8109 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm10, %zmm4
8110 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm9, %zmm4
8111 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8112 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8113 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm28, %zmm2
8114 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm11
8115 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [3,0,12,4,3,0,12,4]
8116 ; AVX512BW-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
8117 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm29
8118 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm5, %zmm29
8119 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [13,5,13,5,13,5,13,5]
8120 ; AVX512BW-ONLY-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8121 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
8122 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm1, %zmm2
8123 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8124 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
8125 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8126 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
8127 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm10, %zmm2
8128 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8129 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm27, %zmm28, %zmm8
8130 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8131 ; AVX512BW-ONLY-FAST-NEXT: movb $48, %r10b
8132 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k3
8133 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm31 = [0,8,0,1,0,8,0,1]
8134 ; AVX512BW-ONLY-FAST-NEXT: # zmm31 = mem[0,1,2,3,0,1,2,3]
8135 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm25
8136 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm31, %zmm25
8137 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
8138 ; AVX512BW-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
8139 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, %zmm21
8140 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm4, %zmm21
8141 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm7[0],zmm17[0],zmm7[2],zmm17[2],zmm7[4],zmm17[4],zmm7[6],zmm17[6]
8142 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm9
8143 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm1, %zmm9
8144 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [6,14,6,14]
8145 ; AVX512BW-ONLY-FAST-NEXT: # ymm8 = mem[0,1,0,1]
8146 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2
8147 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm8, %zmm2
8148 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8149 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [15,7,15,7]
8150 ; AVX512BW-ONLY-FAST-NEXT: # ymm2 = mem[0,1,0,1]
8151 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm2, %zmm7
8152 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm6
8153 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8154 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm17
8155 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm5, %zmm17
8156 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
8157 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm2
8158 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8159 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
8160 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm2
8161 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8162 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm28, %zmm13
8163 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8164 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm16
8165 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm31, %zmm16
8166 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm20
8167 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm4, %zmm20
8168 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm17 {%k3} = zmm30[0],zmm15[0],zmm30[2],zmm15[2],zmm30[4],zmm15[4],zmm30[6],zmm15[6]
8169 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm7
8170 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm1, %zmm7
8171 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, %zmm2
8172 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm8, %zmm2
8173 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8174 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm6, %zmm30
8175 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm13
8176 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm15
8177 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm2
8178 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm6
8179 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm10, %zmm6
8180 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8181 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm12
8182 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm15, %zmm5, %zmm12
8183 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm6
8184 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm1, %zmm6
8185 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8186 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm15, %zmm23
8187 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8188 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm28, %zmm15
8189 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm24, %zmm0, %zmm5
8190 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm24, %zmm10
8191 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm27
8192 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm28, %zmm24
8193 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm27
8194 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm19
8195 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm0
8196 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm23
8197 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm23
8198 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm6
8199 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
8200 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, %zmm28
8201 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm31, %zmm28
8202 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
8203 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm26, %zmm4, %zmm2
8204 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k3} = zmm26[0],zmm3[0],zmm26[2],zmm3[2],zmm26[4],zmm3[4],zmm26[6],zmm3[6]
8205 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm26, %zmm1
8206 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm26, %zmm8
8207 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm13, %zmm26
8208 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm12 {%k3} = zmm19[0],zmm0[0],zmm19[2],zmm0[2],zmm19[4],zmm0[4],zmm19[6],zmm0[6]
8209 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm31
8210 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm19, %zmm0, %zmm4
8211 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8212 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm3
8213 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8214 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm13, %zmm19
8215 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
8216 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
8217 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8218 ; AVX512BW-ONLY-FAST-NEXT: movb $12, %sil
8219 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
8220 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k3}
8221 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
8222 ; AVX512BW-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8223 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8224 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
8225 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm3, %zmm3
8226 ; AVX512BW-ONLY-FAST-NEXT: movb $112, %sil
8227 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k4
8228 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm25 {%k4}
8229 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm3
8230 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
8231 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8232 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm16 {%k3}
8233 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8234 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm22, %zmm0, %zmm3
8235 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm3, %zmm3
8236 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16 {%k4}
8237 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm3
8238 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
8239 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8240 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm28 {%k3}
8241 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8242 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm0, %zmm3
8243 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 128(%rax), %zmm3, %zmm3
8244 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm28 {%k4}
8245 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm3
8246 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
8247 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8248 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm31 {%k3}
8249 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm22
8250 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm14
8251 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm22, %zmm0
8252 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
8253 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
8254 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8255 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8256 ; AVX512BW-ONLY-FAST-NEXT: movb $14, %sil
8257 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
8258 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k3}
8259 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8260 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
8261 ; AVX512BW-ONLY-FAST-NEXT: movb $120, %sil
8262 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k4
8263 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8264 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm0 {%k4}
8265 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8266 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8267 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
8268 ; AVX512BW-ONLY-FAST-NEXT: # zmm13 = zmm0[0,1,2,3],mem[4,5,6,7]
8269 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8270 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8271 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k3}
8272 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8273 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
8274 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
8275 ; AVX512BW-ONLY-FAST-NEXT: movb $-61, %sil
8276 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k5
8277 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8278 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k5}
8279 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8280 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
8281 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8282 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm3 {%k4}
8283 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, %zmm1 {%k1}
8284 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
8285 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm27 {%k4}
8286 ; AVX512BW-ONLY-FAST-NEXT: movb $24, %sil
8287 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
8288 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8289 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8290 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k3}
8291 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8292 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 64-byte Folded Reload
8293 ; AVX512BW-ONLY-FAST-NEXT: # zmm1 = zmm0[0,1,2,3],mem[4,5,6,7]
8294 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8295 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k5}
8296 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm8[0,1,2,3],zmm10[4,5,6,7]
8297 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8298 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k5}
8299 ; AVX512BW-ONLY-FAST-NEXT: movb $-31, %sil
8300 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k4
8301 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8302 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k4}
8303 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8304 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8305 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm30 {%k3}
8306 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm30 {%k4}
8307 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm26 {%k3}
8308 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm26 {%k4}
8309 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
8310 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
8311 ; AVX512BW-ONLY-FAST-NEXT: movb $6, %sil
8312 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k4
8313 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k4}
8314 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
8315 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
8316 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k4}
8317 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
8318 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
8319 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k4}
8320 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
8321 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
8322 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k4}
8323 ; AVX512BW-ONLY-FAST-NEXT: movb $56, %cl
8324 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k4
8325 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8326 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k4}
8327 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8328 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm20 {%k4}
8329 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8330 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k4}
8331 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
8332 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm22, %zmm8
8333 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm0
8334 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
8335 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm8
8336 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm4 {%k4}
8337 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
8338 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm6 {%k1}
8339 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
8340 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
8341 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm11 {%k1}
8342 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm8 # 64-byte Folded Reload
8343 ; AVX512BW-ONLY-FAST-NEXT: # zmm8 = zmm23[0,1,2,3],mem[4,5,6,7]
8344 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <0,11,u,u,4,5,6,7>
8345 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm12, %zmm9
8346 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,1,11,u,4,5,6,7>
8347 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm9, %zmm10
8348 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%r8), %ymm9
8349 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm9 = ymm9[0],mem[0],ymm9[2],mem[2]
8350 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm11 {%k2} = zmm9[2,3,2,3],zmm0[2,3,2,3]
8351 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm12
8352 ; AVX512BW-ONLY-FAST-NEXT: movb $64, %al
8353 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
8354 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm8 {%k1}
8355 ; AVX512BW-ONLY-FAST-NEXT: movb $8, %al
8356 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
8357 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
8358 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, %zmm19 {%k3}
8359 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <12,u,u,3,4,5,6,13>
8360 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm6, %zmm9
8361 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,1,2,3,4,15,u,u>
8362 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm22, %zmm19, %zmm6
8363 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,12,u,3,4,5,6,7>
8364 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm9, %zmm11
8365 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <13,u,2,3,4,5,6,14>
8366 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm8, %zmm9
8367 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,1,2,3,4,5,15,u>
8368 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm6, %zmm8
8369 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,1,12,3,4,5,6,7]
8370 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm11, %zmm6
8371 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,13,2,3,4,5,6,7]
8372 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm9, %zmm11
8373 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [14,1,2,3,4,5,6,15]
8374 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm8, %zmm9
8375 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8376 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 1472(%rax)
8377 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1408(%rax)
8378 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 1280(%rax)
8379 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
8380 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 1152(%rax)
8381 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 1088(%rax)
8382 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8383 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
8384 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 960(%rax)
8385 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 832(%rax)
8386 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 768(%rax)
8387 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 704(%rax)
8388 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 640(%rax)
8389 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8390 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 576(%rax)
8391 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 512(%rax)
8392 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8393 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
8394 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 320(%rax)
8395 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8396 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 256(%rax)
8397 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm29, 192(%rax)
8398 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8399 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
8400 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 64(%rax)
8401 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, 1344(%rax)
8402 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 896(%rax)
8403 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 448(%rax)
8404 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, (%rax)
8405 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1728(%rax)
8406 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 1664(%rax)
8407 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 1600(%rax)
8408 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 1536(%rax)
8409 ; AVX512BW-ONLY-FAST-NEXT: addq $2024, %rsp # imm = 0x7E8
8410 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
8411 ; AVX512BW-ONLY-FAST-NEXT: retq
8413 ; AVX512DQBW-SLOW-LABEL: store_i64_stride7_vf32:
8414 ; AVX512DQBW-SLOW: # %bb.0:
8415 ; AVX512DQBW-SLOW-NEXT: subq $2120, %rsp # imm = 0x848
8416 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8417 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm10
8418 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm8
8419 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm15
8420 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
8421 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm20
8422 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm21
8423 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm5
8424 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm28
8425 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm3
8426 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm2
8427 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rax), %zmm4
8428 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rax), %zmm7
8429 ; AVX512DQBW-SLOW-NEXT: movb $96, %r10b
8430 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k1
8431 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8432 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
8433 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8434 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1
8435 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm1
8436 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14
8437 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
8438 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8439 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm0, %zmm1
8440 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8441 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm16
8442 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [11,3,11,3,11,3,11,3]
8443 ; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8444 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm0
8445 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm1, %zmm0
8446 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm18
8447 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [2,10,0,3,2,10,0,3]
8448 ; AVX512DQBW-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
8449 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
8450 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm9, %zmm1
8451 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm17
8452 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
8453 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %ymm9
8454 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm9, (%rsp) # 32-byte Spill
8455 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%r9), %ymm11
8456 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8457 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %ymm0
8458 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8459 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%r8), %ymm12
8460 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8461 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
8462 ; AVX512DQBW-SLOW-NEXT: movb $28, %r10b
8463 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k2
8464 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 {%k2} = zmm0[2,3,2,3],zmm4[2,3,2,3]
8465 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8466 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,0,5,4,12,0,5]
8467 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8468 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm1
8469 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm1
8470 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm9
8471 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,1,12,7,0,1,12,7]
8472 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8473 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm0, %zmm1
8474 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8475 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm22 = [5,0,14,6,5,0,14,6]
8476 ; AVX512DQBW-SLOW-NEXT: # zmm22 = mem[0,1,2,3,0,1,2,3]
8477 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm19
8478 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8479 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm19
8480 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,13,6,7,0,13,6,7]
8481 ; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8482 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm19
8483 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8484 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm19
8485 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm4 = [15,7,15,7,15,7,15,7]
8486 ; AVX512DQBW-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8487 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm3
8488 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [6,13,14,7,6,13,14,7]
8489 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8490 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm19
8491 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8492 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm3
8493 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8494 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm18, %zmm3
8495 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm19
8496 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm17, %zmm19
8497 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm23
8498 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
8499 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm12[0],ymm11[0],ymm12[2],ymm11[2]
8500 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 {%k2} = zmm3[2,3,2,3],zmm7[2,3,2,3]
8501 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8502 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm3
8503 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm12
8504 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
8505 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8506 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm14, %zmm11
8507 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8508 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm16, %zmm11
8509 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8510 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm11
8511 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm11
8512 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm0, %zmm11
8513 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8514 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm11
8515 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8516 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm22, %zmm11
8517 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm11
8518 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8519 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8520 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm3
8521 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm7
8522 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8523 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm24
8524 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm31
8525 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, %zmm3
8526 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm31, %zmm18, %zmm3
8527 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm17
8528 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm30
8529 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm7
8530 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm23, %zmm7
8531 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm7 {%k1}
8532 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rax), %zmm29
8533 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%r9), %ymm11
8534 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8535 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%r8), %ymm3
8536 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8537 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm3[0],ymm11[0],ymm3[2],ymm11[2]
8538 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 {%k2} = zmm3[2,3,2,3],zmm29[2,3,2,3]
8539 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8540 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r8), %zmm3
8541 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r9), %zmm7
8542 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm7, %zmm3, %zmm9
8543 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm0, %zmm9
8544 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8545 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm3, %zmm7, %zmm22
8546 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm22
8547 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8548 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
8549 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm14, %zmm0
8550 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8551 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm16, %zmm0
8552 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8553 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8554 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm4, %zmm3
8555 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm29
8556 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [13,5,13,5,13,5,13,5]
8557 ; AVX512DQBW-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8558 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
8559 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm3, %zmm0
8560 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8561 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm25 = [6,14,6,14,6,14,6,14]
8562 ; AVX512DQBW-SLOW-NEXT: # zmm25 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8563 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
8564 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm25, %zmm0
8565 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8566 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
8567 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm4, %zmm0
8568 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8569 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
8570 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
8571 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm0, %zmm5
8572 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm26
8573 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm3, %zmm26
8574 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm1
8575 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm25, %zmm1
8576 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8577 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, %zmm1
8578 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm4, %zmm1
8579 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8580 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm0, %zmm28
8581 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm12
8582 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm2
8583 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm1
8584 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm25, %zmm1
8585 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8586 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm11
8587 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm3, %zmm11
8588 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm30, %zmm17, %zmm25
8589 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
8590 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm30, %zmm4, %zmm22
8591 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm0, %zmm30
8592 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
8593 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm0, %zmm16
8594 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm23
8595 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8596 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm0
8597 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm12
8598 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm0
8599 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8600 ; AVX512DQBW-SLOW-NEXT: movb $48, %r10b
8601 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k3
8602 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [1,0,10,2,1,0,10,2]
8603 ; AVX512DQBW-SLOW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
8604 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, %zmm20
8605 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm4, %zmm20
8606 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm5 {%k3} = zmm10[0],zmm15[0],zmm10[2],zmm15[2],zmm10[4],zmm15[4],zmm10[6],zmm15[6]
8607 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8608 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm18
8609 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm7
8610 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm21
8611 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm14
8612 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
8613 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8614 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm2, %zmm14
8615 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm3, %zmm18
8616 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
8617 ; AVX512DQBW-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
8618 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm1, %zmm7
8619 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [15,7,15,7]
8620 ; AVX512DQBW-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
8621 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm15, %zmm6, %zmm21
8622 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm15
8623 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm4, %zmm15
8624 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm28 {%k3} = zmm8[0],zmm13[0],zmm8[2],zmm13[2],zmm8[4],zmm13[4],zmm8[6],zmm13[6]
8625 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm23
8626 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm5
8627 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm17
8628 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
8629 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm2, %zmm10
8630 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm23
8631 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm1, %zmm5
8632 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm6, %zmm17
8633 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm19
8634 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm0
8635 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm27
8636 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm27
8637 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, %zmm13
8638 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm3, %zmm13
8639 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm8
8640 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm8
8641 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm31, %zmm24, %zmm3
8642 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm31, %zmm24, %zmm1
8643 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm30 {%k3} = zmm24[0],zmm31[0],zmm24[2],zmm31[2],zmm24[4],zmm31[4],zmm24[6],zmm31[6]
8644 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, %zmm9
8645 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm31, %zmm2, %zmm24
8646 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm31, %zmm6, %zmm9
8647 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm19[0],zmm0[0],zmm19[2],zmm0[2],zmm19[4],zmm0[4],zmm19[6],zmm0[6]
8648 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm0, %zmm19, %zmm2
8649 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm19, %zmm0, %zmm4
8650 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
8651 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm0, %zmm19, %zmm31
8652 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8653 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm6, %zmm19
8654 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8655 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k1}
8656 ; AVX512DQBW-SLOW-NEXT: movb $120, %sil
8657 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
8658 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8659 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm0 {%k3}
8660 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8661 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm18 # 64-byte Folded Reload
8662 ; AVX512DQBW-SLOW-NEXT: # zmm18 = zmm7[0,1,2,3],mem[4,5,6,7]
8663 ; AVX512DQBW-SLOW-NEXT: movb $-61, %sil
8664 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
8665 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8666 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k4}
8667 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, %zmm23 {%k1}
8668 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
8669 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm26 {%k3}
8670 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm3 {%k1}
8671 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8672 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0 {%k3}
8673 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8674 ; AVX512DQBW-SLOW-NEXT: movb $24, %sil
8675 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
8676 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8677 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k3}
8678 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm3 # 64-byte Folded Reload
8679 ; AVX512DQBW-SLOW-NEXT: # zmm3 = zmm5[0,1,2,3],mem[4,5,6,7]
8680 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8681 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
8682 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm25[4,5,6,7]
8683 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8684 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k4}
8685 ; AVX512DQBW-SLOW-NEXT: movb $-31, %sil
8686 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
8687 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8688 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm21 {%k4}
8689 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8690 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm17 {%k3}
8691 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8692 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm17 {%k4}
8693 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm9 {%k3}
8694 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9 {%k4}
8695 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm1
8696 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
8697 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8698 ; AVX512DQBW-SLOW-NEXT: movb $12, %sil
8699 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
8700 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm14 {%k4}
8701 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [0,8,0,8,0,8,0,8]
8702 ; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8703 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8704 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm6 # 64-byte Folded Reload
8705 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %xmm5
8706 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
8707 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8708 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm10 {%k4}
8709 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdx), %xmm5
8710 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
8711 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8712 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm24 {%k4}
8713 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdx), %xmm5
8714 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
8715 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8716 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm2 {%k4}
8717 ; AVX512DQBW-SLOW-NEXT: movb $112, %sil
8718 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
8719 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm6, %zmm14 {%k4}
8720 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
8721 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
8722 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm5, %zmm10 {%k4}
8723 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
8724 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
8725 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 128(%rax), %zmm5, %zmm24 {%k4}
8726 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %zmm5
8727 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r9), %zmm11
8728 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm5, %zmm1
8729 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 192(%rax), %zmm1, %zmm2 {%k4}
8730 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
8731 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
8732 ; AVX512DQBW-SLOW-NEXT: movb $6, %sil
8733 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
8734 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm20 {%k4}
8735 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
8736 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
8737 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm15 {%k4}
8738 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm1
8739 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
8740 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k4}
8741 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm1
8742 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
8743 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
8744 ; AVX512DQBW-SLOW-NEXT: movb $56, %cl
8745 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k4
8746 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8747 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm20 {%k4}
8748 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8749 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm15 {%k4}
8750 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8751 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm8 {%k4}
8752 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8753 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm5, %zmm6
8754 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rax), %zmm1
8755 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
8756 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
8757 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k4}
8758 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8759 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm13 {%k1}
8760 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
8761 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8762 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm23 {%k1}
8763 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm7 # 64-byte Folded Reload
8764 ; AVX512DQBW-SLOW-NEXT: # zmm7 = zmm27[0,1,2,3],mem[4,5,6,7]
8765 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,11,u,u,4,5,6,7>
8766 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm16, %zmm22
8767 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,11,u,4,5,6,7>
8768 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm22, %zmm6
8769 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %ymm16
8770 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm16 = ymm16[0],mem[0],ymm16[2],mem[2]
8771 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 {%k2} = zmm16[2,3,2,3],zmm1[2,3,2,3]
8772 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm19 {%k3}
8773 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
8774 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq (%rsp), %ymm12, %ymm12 # 32-byte Folded Reload
8775 ; AVX512DQBW-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
8776 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
8777 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
8778 ; AVX512DQBW-SLOW-NEXT: movb $14, %cl
8779 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
8780 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
8781 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm16 {%k1}
8782 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
8783 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
8784 ; AVX512DQBW-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
8785 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
8786 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
8787 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm28 {%k1}
8788 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
8789 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm12 # 32-byte Folded Reload
8790 ; AVX512DQBW-SLOW-NEXT: # ymm12 = ymm12[1],mem[1],ymm12[3],mem[3]
8791 ; AVX512DQBW-SLOW-NEXT: movb $64, %cl
8792 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
8793 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k2}
8794 ; AVX512DQBW-SLOW-NEXT: movb $8, %cl
8795 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
8796 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k2}
8797 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,3,3]
8798 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],mem[6,7]
8799 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm30 {%k1}
8800 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
8801 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm13, %zmm12
8802 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,1,2,3,4,15,u,u>
8803 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm5, %zmm19, %zmm13
8804 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,12,u,3,4,5,6,7>
8805 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm12, %zmm5
8806 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
8807 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm7, %zmm12
8808 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,1,2,3,4,5,15,u>
8809 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm11, %zmm13, %zmm7
8810 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,1,12,3,4,5,6,7]
8811 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm5, %zmm11
8812 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,13,2,3,4,5,6,7]
8813 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm12, %zmm5
8814 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,1,2,3,4,5,6,15]
8815 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm7, %zmm12
8816 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8817 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, 1472(%rax)
8818 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 1408(%rax)
8819 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 1344(%rax)
8820 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 1280(%rax)
8821 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 1216(%rax)
8822 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8823 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1152(%rax)
8824 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, 1088(%rax)
8825 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8826 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
8827 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, 960(%rax)
8828 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, 896(%rax)
8829 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, 832(%rax)
8830 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 768(%rax)
8831 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, 704(%rax)
8832 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, 640(%rax)
8833 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8834 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
8835 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, 512(%rax)
8836 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 448(%rax)
8837 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, 384(%rax)
8838 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, 320(%rax)
8839 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8840 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 256(%rax)
8841 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
8842 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8843 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
8844 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 64(%rax)
8845 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, (%rax)
8846 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 1728(%rax)
8847 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 1664(%rax)
8848 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 1600(%rax)
8849 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 1536(%rax)
8850 ; AVX512DQBW-SLOW-NEXT: addq $2120, %rsp # imm = 0x848
8851 ; AVX512DQBW-SLOW-NEXT: vzeroupper
8852 ; AVX512DQBW-SLOW-NEXT: retq
8854 ; AVX512DQBW-FAST-LABEL: store_i64_stride7_vf32:
8855 ; AVX512DQBW-FAST: # %bb.0:
8856 ; AVX512DQBW-FAST-NEXT: subq $2056, %rsp # imm = 0x808
8857 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8858 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm10
8859 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm15
8860 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm16
8861 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm18
8862 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm21
8863 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm9
8864 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm20
8865 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm17
8866 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm0
8867 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm6
8868 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rax), %zmm8
8869 ; AVX512DQBW-FAST-NEXT: movb $96, %r10b
8870 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k1
8871 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8872 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [9,1,9,1,9,1,9,1]
8873 ; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8874 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
8875 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm1, %zmm2
8876 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm11
8877 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [4,9,0,3,4,9,0,3]
8878 ; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8879 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm1, %zmm2
8880 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8881 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm12
8882 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm2 = [11,3,11,3,11,3,11,3]
8883 ; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8884 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
8885 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm1
8886 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm14
8887 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [2,10,0,3,2,10,0,3]
8888 ; AVX512DQBW-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
8889 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm2
8890 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm3, %zmm2
8891 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm19
8892 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
8893 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm4
8894 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r9), %ymm1
8895 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%r9), %ymm5
8896 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%r9), %ymm3
8897 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r8), %ymm7
8898 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %ymm23
8899 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %ymm22
8900 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm7[0],ymm1[0],ymm7[2],ymm1[2]
8901 ; AVX512DQBW-FAST-NEXT: movb $28, %r10b
8902 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k2
8903 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm8[2,3,2,3]
8904 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8905 ; AVX512DQBW-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [1,3,7,7]
8906 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm1, %ymm4, %ymm7
8907 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8908 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
8909 ; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8910 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
8911 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm2, %zmm1
8912 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm13
8913 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,1,12,7,0,1,12,7]
8914 ; AVX512DQBW-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
8915 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm7, %zmm1
8916 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8917 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [5,0,14,6,5,0,14,6]
8918 ; AVX512DQBW-FAST-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
8919 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm1
8920 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8921 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm24, %zmm1
8922 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,13,6,7,0,13,6,7]
8923 ; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
8924 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm1
8925 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8926 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm26 = [15,7,15,7,15,7,15,7]
8927 ; AVX512DQBW-FAST-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
8928 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm26, %zmm0
8929 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [6,13,14,7,6,13,14,7]
8930 ; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
8931 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
8932 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8933 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm23[0],ymm5[0],ymm23[2],ymm5[2]
8934 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm5, %ymm4, %ymm23
8935 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %ymm23, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8936 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm8 = ymm22[0],ymm3[0],ymm22[2],ymm3[2]
8937 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm3, %ymm4, %ymm22
8938 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8939 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm23
8940 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm3
8941 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm5
8942 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8943 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm14, %zmm3
8944 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm4
8945 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm19, %zmm4
8946 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm4 {%k1}
8947 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rax), %zmm14
8948 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm0[2,3,2,3],zmm14[2,3,2,3]
8949 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8950 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm0
8951 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm22
8952 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8953 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm4
8954 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8955 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm11, %zmm4
8956 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
8957 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8958 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm12, %zmm4
8959 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8960 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm6
8961 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm4
8962 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm13, %zmm6
8963 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm7, %zmm6
8964 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8965 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm6
8966 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm24, %zmm6
8967 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm14, %zmm2, %zmm6
8968 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8969 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8970 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm26, %zmm0
8971 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm14
8972 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdi), %zmm27
8973 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rsi), %zmm6
8974 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
8975 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm5, %zmm0
8976 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdx), %zmm13
8977 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rcx), %zmm5
8978 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm22
8979 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm19, %zmm22
8980 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm28
8981 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
8982 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rax), %zmm12
8983 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm22 {%k2} = zmm8[2,3,2,3],zmm12[2,3,2,3]
8984 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8985 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %zmm0
8986 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r9), %zmm8
8987 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm0, %zmm4
8988 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm7, %zmm4
8989 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8990 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm8, %zmm24
8991 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm2, %zmm24
8992 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8993 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
8994 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm11, %zmm2
8995 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8996 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm3, %zmm2
8997 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8998 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8999 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm26, %zmm0
9000 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm12
9001 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [3,0,12,4,3,0,12,4]
9002 ; AVX512DQBW-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
9003 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm1
9004 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm0
9005 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm15, %zmm1
9006 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm3
9007 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm1 = [13,5,13,5,13,5,13,5]
9008 ; AVX512DQBW-FAST-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9009 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm2
9010 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm1, %zmm2
9011 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9012 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [6,14,6,14,6,14,6,14]
9013 ; AVX512DQBW-FAST-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9014 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
9015 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm9, %zmm2
9016 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9017 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm26, %zmm0
9018 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9019 ; AVX512DQBW-FAST-NEXT: movb $48, %r10b
9020 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k3
9021 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
9022 ; AVX512DQBW-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
9023 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm29
9024 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm11, %zmm29
9025 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [1,0,10,2,1,0,10,2]
9026 ; AVX512DQBW-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
9027 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm31
9028 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm31
9029 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm10[0],zmm16[0],zmm10[2],zmm16[2],zmm10[4],zmm16[4],zmm10[6],zmm16[6]
9030 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
9031 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm7
9032 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm1, %zmm7
9033 ; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,14,6,14]
9034 ; AVX512DQBW-FAST-NEXT: # ymm4 = mem[0,1,0,1]
9035 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm24
9036 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm4, %zmm24
9037 ; AVX512DQBW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [15,7,15,7]
9038 ; AVX512DQBW-FAST-NEXT: # ymm2 = mem[0,1,0,1]
9039 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm2, %zmm10
9040 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9041 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm22
9042 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm15, %zmm22
9043 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm25
9044 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm1, %zmm25
9045 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm30
9046 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm9, %zmm30
9047 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm17, %zmm26, %zmm21
9048 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9049 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
9050 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm19
9051 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm11, %zmm19
9052 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm17
9053 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm8, %zmm17
9054 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm22 {%k3} = zmm23[0],zmm18[0],zmm23[2],zmm18[2],zmm23[4],zmm18[4],zmm23[6],zmm18[6]
9055 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm3
9056 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm1, %zmm3
9057 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm4, %zmm23
9058 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm2, %zmm0
9059 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9060 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdx), %zmm18
9061 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rcx), %zmm0
9062 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm10
9063 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm10
9064 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9065 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm16
9066 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm18, %zmm15, %zmm16
9067 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm10
9068 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm10
9069 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9070 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm18, %zmm28
9071 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9072 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm26, %zmm18
9073 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm13, %zmm5, %zmm15
9074 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm13, %zmm9
9075 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm28
9076 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm13
9077 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm1, %zmm28
9078 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdi), %zmm20
9079 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rsi), %zmm0
9080 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm21
9081 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm21
9082 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm10
9083 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm10
9084 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm26
9085 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm11, %zmm26
9086 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm5
9087 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm8, %zmm5
9088 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm15 {%k3} = zmm27[0],zmm6[0],zmm27[2],zmm6[2],zmm27[4],zmm6[4],zmm27[6],zmm6[6]
9089 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm27, %zmm1
9090 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm27, %zmm4
9091 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm2, %zmm27
9092 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm20[0],zmm0[0],zmm20[2],zmm0[2],zmm20[4],zmm0[4],zmm20[6],zmm0[6]
9093 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm20, %zmm11
9094 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm20, %zmm0, %zmm8
9095 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9096 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm20, %zmm6
9097 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9098 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm2, %zmm20
9099 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9100 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
9101 ; AVX512DQBW-FAST-NEXT: movb $14, %sil
9102 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
9103 ; AVX512DQBW-FAST-NEXT: vmovdqu64 (%rsp), %zmm2 # 64-byte Reload
9104 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
9105 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
9106 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9107 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
9108 ; AVX512DQBW-FAST-NEXT: movb $120, %sil
9109 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
9110 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9111 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm0 {%k4}
9112 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9113 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm24, %zmm2 # 64-byte Folded Reload
9114 ; AVX512DQBW-FAST-NEXT: # zmm2 = zmm24[0,1,2,3],mem[4,5,6,7]
9115 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9116 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
9117 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k3}
9118 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9119 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
9120 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k3}
9121 ; AVX512DQBW-FAST-NEXT: movb $-61, %sil
9122 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k5
9123 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9124 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k5}
9125 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9126 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, %zmm3 {%k1}
9127 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
9128 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm7 {%k4}
9129 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm1 {%k1}
9130 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
9131 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm24 {%k4}
9132 ; AVX512DQBW-FAST-NEXT: movb $24, %sil
9133 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
9134 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9135 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9136 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm6 {%k3}
9137 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm23[0,1,2,3],zmm30[4,5,6,7]
9138 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9139 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k5}
9140 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm4[0,1,2,3],zmm9[4,5,6,7]
9141 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9142 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k5}
9143 ; AVX512DQBW-FAST-NEXT: movb $-31, %sil
9144 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
9145 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9146 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm6 {%k4}
9147 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm4
9148 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
9149 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9150 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k3}
9151 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, %zmm28 {%k4}
9152 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm27 {%k3}
9153 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm27 {%k4}
9154 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm0
9155 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
9156 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9157 ; AVX512DQBW-FAST-NEXT: movb $12, %sil
9158 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
9159 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k4}
9160 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
9161 ; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
9162 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9163 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
9164 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdx), %xmm3
9165 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
9166 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9167 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm19 {%k4}
9168 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdx), %xmm3
9169 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
9170 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9171 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm26 {%k4}
9172 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdx), %xmm3
9173 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],mem[0]
9174 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9175 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm11 {%k4}
9176 ; AVX512DQBW-FAST-NEXT: movb $112, %sil
9177 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
9178 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm6, %zmm29 {%k4}
9179 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9180 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
9181 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm3, %zmm19 {%k4}
9182 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9183 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
9184 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 128(%rax), %zmm3, %zmm26 {%k4}
9185 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
9186 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %zmm6
9187 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm3, %zmm0
9188 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 192(%rax), %zmm0, %zmm11 {%k4}
9189 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
9190 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
9191 ; AVX512DQBW-FAST-NEXT: movb $6, %sil
9192 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
9193 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm31 {%k4}
9194 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
9195 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
9196 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm17 {%k4}
9197 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
9198 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
9199 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k4}
9200 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
9201 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
9202 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k4}
9203 ; AVX512DQBW-FAST-NEXT: movb $56, %cl
9204 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k4
9205 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9206 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k4}
9207 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9208 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm17 {%k4}
9209 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9210 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
9211 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9212 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm3, %zmm9
9213 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rax), %zmm0
9214 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
9215 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm9
9216 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm8 {%k4}
9217 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9218 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
9219 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
9220 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
9221 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, %zmm14 {%k1}
9222 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm21, %zmm9 # 64-byte Folded Reload
9223 ; AVX512DQBW-FAST-NEXT: # zmm9 = zmm21[0,1,2,3],mem[4,5,6,7]
9224 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,11,u,u,4,5,6,7>
9225 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm16, %zmm12
9226 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = <0,1,11,u,4,5,6,7>
9227 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm12, %zmm13
9228 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%r8), %ymm12
9229 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm12 = ymm12[0],mem[0],ymm12[2],mem[2]
9230 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm12[2,3,2,3],zmm0[2,3,2,3]
9231 ; AVX512DQBW-FAST-NEXT: movb $64, %al
9232 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
9233 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm9 {%k1}
9234 ; AVX512DQBW-FAST-NEXT: movb $8, %al
9235 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
9236 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k1}
9237 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, %zmm20 {%k3}
9238 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <12,u,u,3,4,5,6,13>
9239 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm10, %zmm12
9240 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,1,2,3,4,15,u,u>
9241 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm3, %zmm20, %zmm10
9242 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,12,u,3,4,5,6,7>
9243 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm12, %zmm3
9244 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
9245 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm9, %zmm12
9246 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,1,2,3,4,5,15,u>
9247 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm6, %zmm10, %zmm9
9248 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,1,12,3,4,5,6,7]
9249 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm6
9250 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,13,2,3,4,5,6,7]
9251 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm12, %zmm3
9252 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [14,1,2,3,4,5,6,15]
9253 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm0, %zmm9, %zmm10
9254 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
9255 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, 1472(%rax)
9256 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, 1408(%rax)
9257 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, 1344(%rax)
9258 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, 1280(%rax)
9259 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, 1216(%rax)
9260 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, 1152(%rax)
9261 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, 1088(%rax)
9262 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9263 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1024(%rax)
9264 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 960(%rax)
9265 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, 896(%rax)
9266 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, 832(%rax)
9267 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 768(%rax)
9268 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 704(%rax)
9269 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, 640(%rax)
9270 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9271 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 576(%rax)
9272 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, 512(%rax)
9273 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 448(%rax)
9274 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 384(%rax)
9275 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9276 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 320(%rax)
9277 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9278 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 256(%rax)
9279 ; AVX512DQBW-FAST-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
9280 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 192(%rax)
9281 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
9282 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 128(%rax)
9283 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, 64(%rax)
9284 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, (%rax)
9285 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, 1728(%rax)
9286 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, 1664(%rax)
9287 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 1600(%rax)
9288 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 1536(%rax)
9289 ; AVX512DQBW-FAST-NEXT: addq $2056, %rsp # imm = 0x808
9290 ; AVX512DQBW-FAST-NEXT: vzeroupper
9291 ; AVX512DQBW-FAST-NEXT: retq
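; The IR below is the vf32 source pattern for the checks above: seven <32 x i64> inputs are concatenated and interleaved into a single <224 x i64> stride-7 store.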
9292 %in.vec0 = load <32 x i64>, ptr %in.vecptr0, align 64
9293 %in.vec1 = load <32 x i64>, ptr %in.vecptr1, align 64
9294 %in.vec2 = load <32 x i64>, ptr %in.vecptr2, align 64
9295 %in.vec3 = load <32 x i64>, ptr %in.vecptr3, align 64
9296 %in.vec4 = load <32 x i64>, ptr %in.vecptr4, align 64
9297 %in.vec5 = load <32 x i64>, ptr %in.vecptr5, align 64
9298 %in.vec6 = load <32 x i64>, ptr %in.vecptr6, align 64
9299 %1 = shufflevector <32 x i64> %in.vec0, <32 x i64> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
9300 %2 = shufflevector <32 x i64> %in.vec2, <32 x i64> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
9301 %3 = shufflevector <32 x i64> %in.vec4, <32 x i64> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
9302 %4 = shufflevector <64 x i64> %1, <64 x i64> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
9303 %5 = shufflevector <32 x i64> %in.vec6, <32 x i64> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
9304 %6 = shufflevector <64 x i64> %3, <64 x i64> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
9305 %7 = shufflevector <96 x i64> %6, <96 x i64> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
9306 %8 = shufflevector <128 x i64> %4, <128 x i64> %7, <224 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223>
9307 %interleaved.vec = shufflevector <224 x i64> %8, <224 x i64> poison, <224 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223>
9308 store <224 x i64> %interleaved.vec, ptr %out.vec, align 64
9309 ret void
9310 }
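; vf64 widens the same stride-7 pattern to 64 elements per source vector; the SSE lowering below combines each 16-byte slice of the seven inputs with unpcklpd/unpckhpd/movsd and spills the interleaved pairs to the stack.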
9312 define void @store_i64_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
9313 ; SSE-LABEL: store_i64_stride7_vf64:
9314 ; SSE: # %bb.0:
9315 ; SSE-NEXT: subq $3224, %rsp # imm = 0xC98
9316 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
9317 ; SSE-NEXT: movapd (%rdi), %xmm2
9318 ; SSE-NEXT: movapd 16(%rdi), %xmm3
9319 ; SSE-NEXT: movapd 32(%rdi), %xmm4
9320 ; SSE-NEXT: movapd (%rsi), %xmm5
9321 ; SSE-NEXT: movapd 16(%rsi), %xmm6
9322 ; SSE-NEXT: movapd (%rdx), %xmm7
9323 ; SSE-NEXT: movapd 16(%rdx), %xmm8
9324 ; SSE-NEXT: movapd (%rcx), %xmm9
9325 ; SSE-NEXT: movapd 16(%rcx), %xmm10
9326 ; SSE-NEXT: movapd 16(%r8), %xmm12
9327 ; SSE-NEXT: movapd (%r8), %xmm11
9328 ; SSE-NEXT: movapd 16(%r9), %xmm14
9329 ; SSE-NEXT: movapd (%r9), %xmm13
9330 ; SSE-NEXT: movapd 16(%rax), %xmm0
9331 ; SSE-NEXT: movapd (%rax), %xmm1
9332 ; SSE-NEXT: movapd %xmm2, %xmm15
9333 ; SSE-NEXT: unpcklpd {{.*#+}} xmm15 = xmm15[0],xmm5[0]
9334 ; SSE-NEXT: movapd %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9335 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm1[0],xmm2[1]
9336 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9337 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm7[1]
9338 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9339 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm9[0]
9340 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9341 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm11[1]
9342 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9343 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm13[0]
9344 ; SSE-NEXT: movapd %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9345 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm1[1]
9346 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9347 ; SSE-NEXT: movapd %xmm3, %xmm1
9348 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm6[0]
9349 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9350 ; SSE-NEXT: movsd {{.*#+}} xmm3 = xmm0[0],xmm3[1]
9351 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9352 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm8[1]
9353 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9354 ; SSE-NEXT: unpcklpd {{.*#+}} xmm8 = xmm8[0],xmm10[0]
9355 ; SSE-NEXT: movapd %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9356 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm12[1]
9357 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9358 ; SSE-NEXT: unpcklpd {{.*#+}} xmm12 = xmm12[0],xmm14[0]
9359 ; SSE-NEXT: movapd %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9360 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm0[1]
9361 ; SSE-NEXT: movapd %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9362 ; SSE-NEXT: movapd 32(%rsi), %xmm1
9363 ; SSE-NEXT: movapd %xmm4, %xmm0
9364 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
9365 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9366 ; SSE-NEXT: movapd 32(%rax), %xmm0
9367 ; SSE-NEXT: movsd {{.*#+}} xmm4 = xmm0[0],xmm4[1]
9368 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9369 ; SSE-NEXT: movapd 32(%rdx), %xmm2
9370 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
9371 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9372 ; SSE-NEXT: movapd 32(%rcx), %xmm3
9373 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
9374 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9375 ; SSE-NEXT: movapd 32(%r8), %xmm1
9376 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm1[1]
9377 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9378 ; SSE-NEXT: movapd 32(%r9), %xmm2
9379 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9380 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9381 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9382 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9383 ; SSE-NEXT: movapd 48(%rdi), %xmm1
9384 ; SSE-NEXT: movapd 48(%rsi), %xmm2
9385 ; SSE-NEXT: movapd %xmm1, %xmm0
9386 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9387 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9388 ; SSE-NEXT: movapd 48(%rax), %xmm0
9389 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9390 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9391 ; SSE-NEXT: movapd 48(%rdx), %xmm1
9392 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9393 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9394 ; SSE-NEXT: movapd 48(%rcx), %xmm2
9395 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9396 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9397 ; SSE-NEXT: movapd 48(%r8), %xmm1
9398 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9399 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9400 ; SSE-NEXT: movapd 48(%r9), %xmm2
9401 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9402 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9403 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9404 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9405 ; SSE-NEXT: movapd 64(%rdi), %xmm1
9406 ; SSE-NEXT: movapd 64(%rsi), %xmm2
9407 ; SSE-NEXT: movapd %xmm1, %xmm0
9408 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9409 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9410 ; SSE-NEXT: movapd 64(%rax), %xmm0
9411 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9412 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9413 ; SSE-NEXT: movapd 64(%rdx), %xmm1
9414 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9415 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9416 ; SSE-NEXT: movapd 64(%rcx), %xmm2
9417 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9418 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9419 ; SSE-NEXT: movapd 64(%r8), %xmm1
9420 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9421 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9422 ; SSE-NEXT: movapd 64(%r9), %xmm2
9423 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9424 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9425 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9426 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9427 ; SSE-NEXT: movapd 80(%rdi), %xmm1
9428 ; SSE-NEXT: movapd 80(%rsi), %xmm2
9429 ; SSE-NEXT: movapd %xmm1, %xmm0
9430 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9431 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9432 ; SSE-NEXT: movapd 80(%rax), %xmm0
9433 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9434 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9435 ; SSE-NEXT: movapd 80(%rdx), %xmm1
9436 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9437 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9438 ; SSE-NEXT: movapd 80(%rcx), %xmm2
9439 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9440 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9441 ; SSE-NEXT: movapd 80(%r8), %xmm1
9442 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9443 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9444 ; SSE-NEXT: movapd 80(%r9), %xmm2
9445 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9446 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9447 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9448 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9449 ; SSE-NEXT: movapd 96(%rdi), %xmm1
9450 ; SSE-NEXT: movapd 96(%rsi), %xmm2
9451 ; SSE-NEXT: movapd %xmm1, %xmm0
9452 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9453 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9454 ; SSE-NEXT: movapd 96(%rax), %xmm0
9455 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9456 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9457 ; SSE-NEXT: movapd 96(%rdx), %xmm1
9458 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9459 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9460 ; SSE-NEXT: movapd 96(%rcx), %xmm2
9461 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9462 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9463 ; SSE-NEXT: movapd 96(%r8), %xmm1
9464 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9465 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9466 ; SSE-NEXT: movapd 96(%r9), %xmm2
9467 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9468 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9469 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9470 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9471 ; SSE-NEXT: movapd 112(%rdi), %xmm1
9472 ; SSE-NEXT: movapd 112(%rsi), %xmm2
9473 ; SSE-NEXT: movapd %xmm1, %xmm0
9474 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9475 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9476 ; SSE-NEXT: movapd 112(%rax), %xmm0
9477 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9478 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9479 ; SSE-NEXT: movapd 112(%rdx), %xmm1
9480 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9481 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9482 ; SSE-NEXT: movapd 112(%rcx), %xmm2
9483 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9484 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9485 ; SSE-NEXT: movapd 112(%r8), %xmm1
9486 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9487 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9488 ; SSE-NEXT: movapd 112(%r9), %xmm2
9489 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9490 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9491 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9492 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9493 ; SSE-NEXT: movapd 128(%rdi), %xmm1
9494 ; SSE-NEXT: movapd 128(%rsi), %xmm2
9495 ; SSE-NEXT: movapd %xmm1, %xmm0
9496 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9497 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9498 ; SSE-NEXT: movapd 128(%rax), %xmm0
9499 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9500 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9501 ; SSE-NEXT: movapd 128(%rdx), %xmm1
9502 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9503 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9504 ; SSE-NEXT: movapd 128(%rcx), %xmm2
9505 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9506 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9507 ; SSE-NEXT: movapd 128(%r8), %xmm1
9508 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9509 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9510 ; SSE-NEXT: movapd 128(%r9), %xmm2
9511 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9512 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9513 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9514 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9515 ; SSE-NEXT: movapd 144(%rdi), %xmm1
9516 ; SSE-NEXT: movapd 144(%rsi), %xmm2
9517 ; SSE-NEXT: movapd %xmm1, %xmm0
9518 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9519 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9520 ; SSE-NEXT: movapd 144(%rax), %xmm0
9521 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9522 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9523 ; SSE-NEXT: movapd 144(%rdx), %xmm1
9524 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9525 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9526 ; SSE-NEXT: movapd 144(%rcx), %xmm2
9527 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9528 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9529 ; SSE-NEXT: movapd 144(%r8), %xmm1
9530 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9531 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9532 ; SSE-NEXT: movapd 144(%r9), %xmm2
9533 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9534 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9535 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9536 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9537 ; SSE-NEXT: movapd 160(%rdi), %xmm1
9538 ; SSE-NEXT: movapd 160(%rsi), %xmm2
9539 ; SSE-NEXT: movapd %xmm1, %xmm0
9540 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9541 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9542 ; SSE-NEXT: movapd 160(%rax), %xmm0
9543 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9544 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9545 ; SSE-NEXT: movapd 160(%rdx), %xmm1
9546 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9547 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9548 ; SSE-NEXT: movapd 160(%rcx), %xmm2
9549 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9550 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9551 ; SSE-NEXT: movapd 160(%r8), %xmm1
9552 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9553 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9554 ; SSE-NEXT: movapd 160(%r9), %xmm2
9555 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9556 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9557 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9558 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9559 ; SSE-NEXT: movapd 176(%rdi), %xmm1
9560 ; SSE-NEXT: movapd 176(%rsi), %xmm2
9561 ; SSE-NEXT: movapd %xmm1, %xmm0
9562 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9563 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9564 ; SSE-NEXT: movapd 176(%rax), %xmm0
9565 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9566 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9567 ; SSE-NEXT: movapd 176(%rdx), %xmm1
9568 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9569 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9570 ; SSE-NEXT: movapd 176(%rcx), %xmm2
9571 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9572 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9573 ; SSE-NEXT: movapd 176(%r8), %xmm1
9574 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9575 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9576 ; SSE-NEXT: movapd 176(%r9), %xmm2
9577 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9578 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9579 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9580 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9581 ; SSE-NEXT: movapd 192(%rdi), %xmm1
9582 ; SSE-NEXT: movapd 192(%rsi), %xmm2
9583 ; SSE-NEXT: movapd %xmm1, %xmm0
9584 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9585 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9586 ; SSE-NEXT: movapd 192(%rax), %xmm0
9587 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9588 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9589 ; SSE-NEXT: movapd 192(%rdx), %xmm1
9590 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9591 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9592 ; SSE-NEXT: movapd 192(%rcx), %xmm2
9593 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9594 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9595 ; SSE-NEXT: movapd 192(%r8), %xmm1
9596 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9597 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9598 ; SSE-NEXT: movapd 192(%r9), %xmm2
9599 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9600 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9601 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9602 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9603 ; SSE-NEXT: movapd 208(%rdi), %xmm1
9604 ; SSE-NEXT: movapd 208(%rsi), %xmm2
9605 ; SSE-NEXT: movapd %xmm1, %xmm0
9606 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9607 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9608 ; SSE-NEXT: movapd 208(%rax), %xmm0
9609 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9610 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9611 ; SSE-NEXT: movapd 208(%rdx), %xmm1
9612 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9613 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9614 ; SSE-NEXT: movapd 208(%rcx), %xmm2
9615 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9616 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9617 ; SSE-NEXT: movapd 208(%r8), %xmm1
9618 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9619 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9620 ; SSE-NEXT: movapd 208(%r9), %xmm2
9621 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9622 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9623 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9624 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9625 ; SSE-NEXT: movapd 224(%rdi), %xmm1
9626 ; SSE-NEXT: movapd 224(%rsi), %xmm2
9627 ; SSE-NEXT: movapd %xmm1, %xmm0
9628 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9629 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9630 ; SSE-NEXT: movapd 224(%rax), %xmm0
9631 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9632 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9633 ; SSE-NEXT: movapd 224(%rdx), %xmm1
9634 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9635 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9636 ; SSE-NEXT: movapd 224(%rcx), %xmm2
9637 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9638 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9639 ; SSE-NEXT: movapd 224(%r8), %xmm1
9640 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9641 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9642 ; SSE-NEXT: movapd 224(%r9), %xmm2
9643 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9644 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9645 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9646 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9647 ; SSE-NEXT: movapd 240(%rdi), %xmm1
9648 ; SSE-NEXT: movapd 240(%rsi), %xmm2
9649 ; SSE-NEXT: movapd %xmm1, %xmm0
9650 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9651 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9652 ; SSE-NEXT: movapd 240(%rax), %xmm0
9653 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9654 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9655 ; SSE-NEXT: movapd 240(%rdx), %xmm1
9656 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9657 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9658 ; SSE-NEXT: movapd 240(%rcx), %xmm2
9659 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9660 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9661 ; SSE-NEXT: movapd 240(%r8), %xmm1
9662 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9663 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9664 ; SSE-NEXT: movapd 240(%r9), %xmm2
9665 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9666 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9667 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9668 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9669 ; SSE-NEXT: movapd 256(%rdi), %xmm1
9670 ; SSE-NEXT: movapd 256(%rsi), %xmm2
9671 ; SSE-NEXT: movapd %xmm1, %xmm0
9672 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9673 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9674 ; SSE-NEXT: movapd 256(%rax), %xmm0
9675 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9676 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9677 ; SSE-NEXT: movapd 256(%rdx), %xmm1
9678 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9679 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9680 ; SSE-NEXT: movapd 256(%rcx), %xmm2
9681 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9682 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9683 ; SSE-NEXT: movapd 256(%r8), %xmm1
9684 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9685 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9686 ; SSE-NEXT: movapd 256(%r9), %xmm2
9687 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9688 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9689 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9690 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9691 ; SSE-NEXT: movapd 272(%rdi), %xmm1
9692 ; SSE-NEXT: movapd 272(%rsi), %xmm2
9693 ; SSE-NEXT: movapd %xmm1, %xmm0
9694 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9695 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9696 ; SSE-NEXT: movapd 272(%rax), %xmm0
9697 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9698 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9699 ; SSE-NEXT: movapd 272(%rdx), %xmm1
9700 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9701 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9702 ; SSE-NEXT: movapd 272(%rcx), %xmm2
9703 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9704 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9705 ; SSE-NEXT: movapd 272(%r8), %xmm1
9706 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9707 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9708 ; SSE-NEXT: movapd 272(%r9), %xmm2
9709 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9710 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9711 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9712 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9713 ; SSE-NEXT: movapd 288(%rdi), %xmm1
9714 ; SSE-NEXT: movapd 288(%rsi), %xmm2
9715 ; SSE-NEXT: movapd %xmm1, %xmm0
9716 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9717 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9718 ; SSE-NEXT: movapd 288(%rax), %xmm0
9719 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9720 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9721 ; SSE-NEXT: movapd 288(%rdx), %xmm1
9722 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9723 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9724 ; SSE-NEXT: movapd 288(%rcx), %xmm2
9725 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9726 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9727 ; SSE-NEXT: movapd 288(%r8), %xmm1
9728 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9729 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9730 ; SSE-NEXT: movapd 288(%r9), %xmm2
9731 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9732 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9733 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9734 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9735 ; SSE-NEXT: movapd 304(%rdi), %xmm1
9736 ; SSE-NEXT: movapd 304(%rsi), %xmm2
9737 ; SSE-NEXT: movapd %xmm1, %xmm0
9738 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9739 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9740 ; SSE-NEXT: movapd 304(%rax), %xmm0
9741 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9742 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9743 ; SSE-NEXT: movapd 304(%rdx), %xmm1
9744 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9745 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9746 ; SSE-NEXT: movapd 304(%rcx), %xmm2
9747 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9748 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9749 ; SSE-NEXT: movapd 304(%r8), %xmm1
9750 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9751 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9752 ; SSE-NEXT: movapd 304(%r9), %xmm2
9753 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9754 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9755 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9756 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9757 ; SSE-NEXT: movapd 320(%rdi), %xmm1
9758 ; SSE-NEXT: movapd 320(%rsi), %xmm2
9759 ; SSE-NEXT: movapd %xmm1, %xmm0
9760 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9761 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9762 ; SSE-NEXT: movapd 320(%rax), %xmm0
9763 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9764 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9765 ; SSE-NEXT: movapd 320(%rdx), %xmm1
9766 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9767 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9768 ; SSE-NEXT: movapd 320(%rcx), %xmm2
9769 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9770 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9771 ; SSE-NEXT: movapd 320(%r8), %xmm1
9772 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9773 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9774 ; SSE-NEXT: movapd 320(%r9), %xmm2
9775 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9776 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9777 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9778 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9779 ; SSE-NEXT: movapd 336(%rdi), %xmm1
9780 ; SSE-NEXT: movapd 336(%rsi), %xmm2
9781 ; SSE-NEXT: movapd %xmm1, %xmm0
9782 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9783 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9784 ; SSE-NEXT: movapd 336(%rax), %xmm0
9785 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9786 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9787 ; SSE-NEXT: movapd 336(%rdx), %xmm1
9788 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9789 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9790 ; SSE-NEXT: movapd 336(%rcx), %xmm2
9791 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9792 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9793 ; SSE-NEXT: movapd 336(%r8), %xmm1
9794 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9795 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9796 ; SSE-NEXT: movapd 336(%r9), %xmm2
9797 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9798 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9799 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9800 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9801 ; SSE-NEXT: movapd 352(%rdi), %xmm1
9802 ; SSE-NEXT: movapd 352(%rsi), %xmm2
9803 ; SSE-NEXT: movapd %xmm1, %xmm0
9804 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9805 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9806 ; SSE-NEXT: movapd 352(%rax), %xmm0
9807 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9808 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9809 ; SSE-NEXT: movapd 352(%rdx), %xmm1
9810 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9811 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9812 ; SSE-NEXT: movapd 352(%rcx), %xmm2
9813 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9814 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9815 ; SSE-NEXT: movapd 352(%r8), %xmm1
9816 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9817 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9818 ; SSE-NEXT: movapd 352(%r9), %xmm2
9819 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9820 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9821 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9822 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9823 ; SSE-NEXT: movapd 368(%rdi), %xmm1
9824 ; SSE-NEXT: movapd 368(%rsi), %xmm2
9825 ; SSE-NEXT: movapd %xmm1, %xmm0
9826 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9827 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9828 ; SSE-NEXT: movapd 368(%rax), %xmm0
9829 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9830 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9831 ; SSE-NEXT: movapd 368(%rdx), %xmm1
9832 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9833 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9834 ; SSE-NEXT: movapd 368(%rcx), %xmm2
9835 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9836 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9837 ; SSE-NEXT: movapd 368(%r8), %xmm1
9838 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9839 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9840 ; SSE-NEXT: movapd 368(%r9), %xmm2
9841 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9842 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9843 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9844 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9845 ; SSE-NEXT: movapd 384(%rdi), %xmm1
9846 ; SSE-NEXT: movapd 384(%rsi), %xmm2
9847 ; SSE-NEXT: movapd %xmm1, %xmm0
9848 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9849 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9850 ; SSE-NEXT: movapd 384(%rax), %xmm0
9851 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9852 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9853 ; SSE-NEXT: movapd 384(%rdx), %xmm1
9854 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9855 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9856 ; SSE-NEXT: movapd 384(%rcx), %xmm2
9857 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9858 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9859 ; SSE-NEXT: movapd 384(%r8), %xmm1
9860 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9861 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9862 ; SSE-NEXT: movapd 384(%r9), %xmm2
9863 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9864 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9865 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9866 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9867 ; SSE-NEXT: movapd 400(%rdi), %xmm1
9868 ; SSE-NEXT: movapd 400(%rsi), %xmm2
9869 ; SSE-NEXT: movapd %xmm1, %xmm0
9870 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9871 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9872 ; SSE-NEXT: movapd 400(%rax), %xmm0
9873 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9874 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9875 ; SSE-NEXT: movapd 400(%rdx), %xmm1
9876 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9877 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9878 ; SSE-NEXT: movapd 400(%rcx), %xmm2
9879 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9880 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9881 ; SSE-NEXT: movapd 400(%r8), %xmm1
9882 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9883 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9884 ; SSE-NEXT: movapd 400(%r9), %xmm2
9885 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9886 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9887 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9888 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9889 ; SSE-NEXT: movapd 416(%rdi), %xmm1
9890 ; SSE-NEXT: movapd 416(%rsi), %xmm2
9891 ; SSE-NEXT: movapd %xmm1, %xmm0
9892 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9893 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9894 ; SSE-NEXT: movapd 416(%rax), %xmm0
9895 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9896 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9897 ; SSE-NEXT: movapd 416(%rdx), %xmm1
9898 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9899 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9900 ; SSE-NEXT: movapd 416(%rcx), %xmm2
9901 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9902 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9903 ; SSE-NEXT: movapd 416(%r8), %xmm1
9904 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9905 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9906 ; SSE-NEXT: movapd 416(%r9), %xmm2
9907 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9908 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9909 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9910 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9911 ; SSE-NEXT: movapd 432(%rdi), %xmm1
9912 ; SSE-NEXT: movapd 432(%rsi), %xmm2
9913 ; SSE-NEXT: movapd %xmm1, %xmm0
9914 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9915 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9916 ; SSE-NEXT: movapd 432(%rax), %xmm0
9917 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9918 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9919 ; SSE-NEXT: movapd 432(%rdx), %xmm1
9920 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9921 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9922 ; SSE-NEXT: movapd 432(%rcx), %xmm2
9923 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9924 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9925 ; SSE-NEXT: movapd 432(%r8), %xmm1
9926 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9927 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9928 ; SSE-NEXT: movapd 432(%r9), %xmm2
9929 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9930 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9931 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9932 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9933 ; SSE-NEXT: movapd 448(%rdi), %xmm1
9934 ; SSE-NEXT: movapd 448(%rsi), %xmm2
9935 ; SSE-NEXT: movapd %xmm1, %xmm0
9936 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9937 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9938 ; SSE-NEXT: movapd 448(%rax), %xmm0
9939 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9940 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9941 ; SSE-NEXT: movapd 448(%rdx), %xmm1
9942 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9943 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9944 ; SSE-NEXT: movapd 448(%rcx), %xmm2
9945 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9946 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9947 ; SSE-NEXT: movapd 448(%r8), %xmm1
9948 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9949 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9950 ; SSE-NEXT: movapd 448(%r9), %xmm2
9951 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm2[0]
9952 ; SSE-NEXT: movapd %xmm1, (%rsp) # 16-byte Spill
9953 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm0[1]
9954 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9955 ; SSE-NEXT: movapd 464(%rdi), %xmm1
9956 ; SSE-NEXT: movapd 464(%rsi), %xmm2
9957 ; SSE-NEXT: movapd %xmm1, %xmm0
9958 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
9959 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9960 ; SSE-NEXT: movapd 464(%rax), %xmm0
9961 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
9962 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9963 ; SSE-NEXT: movapd 464(%rdx), %xmm1
9964 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
9965 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9966 ; SSE-NEXT: movapd 464(%rcx), %xmm14
9967 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm14[0]
9968 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9969 ; SSE-NEXT: movapd 464(%r8), %xmm1
9970 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm1[1]
9971 ; SSE-NEXT: movapd 464(%r9), %xmm13
9972 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm13[0]
9973 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9974 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
9975 ; SSE-NEXT: movapd 480(%rdi), %xmm15
9976 ; SSE-NEXT: movapd 480(%rsi), %xmm12
9977 ; SSE-NEXT: movapd %xmm15, %xmm0
9978 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm12[0]
9979 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9980 ; SSE-NEXT: movapd 480(%rax), %xmm3
9981 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm3[0],xmm15[1]
9982 ; SSE-NEXT: movapd 480(%rdx), %xmm11
9983 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm11[1]
9984 ; SSE-NEXT: movapd 480(%rcx), %xmm8
9985 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0],xmm8[0]
9986 ; SSE-NEXT: movapd 480(%r8), %xmm9
9987 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm9[1]
9988 ; SSE-NEXT: movapd 480(%r9), %xmm6
9989 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm6[0]
9990 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm3[1]
9991 ; SSE-NEXT: movapd 496(%rdi), %xmm5
9992 ; SSE-NEXT: movapd 496(%rsi), %xmm4
9993 ; SSE-NEXT: movapd %xmm5, %xmm7
9994 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm4[0]
9995 ; SSE-NEXT: movapd 496(%rax), %xmm10
9996 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm10[0],xmm5[1]
9997 ; SSE-NEXT: movapd 496(%rdx), %xmm3
9998 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
9999 ; SSE-NEXT: movapd 496(%rcx), %xmm2
10000 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm2[0]
10001 ; SSE-NEXT: movapd 496(%r8), %xmm1
10002 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm1[1]
10003 ; SSE-NEXT: movapd 496(%r9), %xmm0
10004 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
10005 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
10006 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10007 ; SSE-NEXT: movapd %xmm0, 3568(%rax)
10008 ; SSE-NEXT: movapd %xmm2, 3552(%rax)
10009 ; SSE-NEXT: movapd %xmm4, 3536(%rax)
10010 ; SSE-NEXT: movapd %xmm5, 3520(%rax)
10011 ; SSE-NEXT: movapd %xmm1, 3504(%rax)
10012 ; SSE-NEXT: movapd %xmm3, 3488(%rax)
10013 ; SSE-NEXT: movapd %xmm7, 3472(%rax)
10014 ; SSE-NEXT: movapd %xmm6, 3456(%rax)
10015 ; SSE-NEXT: movapd %xmm8, 3440(%rax)
10016 ; SSE-NEXT: movapd %xmm12, 3424(%rax)
10017 ; SSE-NEXT: movapd %xmm15, 3408(%rax)
10018 ; SSE-NEXT: movapd %xmm9, 3392(%rax)
10019 ; SSE-NEXT: movapd %xmm11, 3376(%rax)
10020 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10021 ; SSE-NEXT: movaps %xmm0, 3360(%rax)
10022 ; SSE-NEXT: movapd %xmm13, 3344(%rax)
10023 ; SSE-NEXT: movapd %xmm14, 3328(%rax)
10024 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10025 ; SSE-NEXT: movaps %xmm0, 3312(%rax)
10026 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10027 ; SSE-NEXT: movaps %xmm0, 3296(%rax)
10028 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10029 ; SSE-NEXT: movaps %xmm0, 3280(%rax)
10030 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10031 ; SSE-NEXT: movaps %xmm0, 3264(%rax)
10032 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10033 ; SSE-NEXT: movaps %xmm0, 3248(%rax)
10034 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10035 ; SSE-NEXT: movaps %xmm0, 3232(%rax)
10036 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10037 ; SSE-NEXT: movaps %xmm0, 3216(%rax)
10038 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10039 ; SSE-NEXT: movaps %xmm0, 3200(%rax)
10040 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10041 ; SSE-NEXT: movaps %xmm0, 3184(%rax)
10042 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
10043 ; SSE-NEXT: movaps %xmm0, 3168(%rax)
10044 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10045 ; SSE-NEXT: movaps %xmm0, 3152(%rax)
10046 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10047 ; SSE-NEXT: movaps %xmm0, 3136(%rax)
10048 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10049 ; SSE-NEXT: movaps %xmm0, 3120(%rax)
10050 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10051 ; SSE-NEXT: movaps %xmm0, 3104(%rax)
10052 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10053 ; SSE-NEXT: movaps %xmm0, 3088(%rax)
10054 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10055 ; SSE-NEXT: movaps %xmm0, 3072(%rax)
10056 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10057 ; SSE-NEXT: movaps %xmm0, 3056(%rax)
10058 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10059 ; SSE-NEXT: movaps %xmm0, 3040(%rax)
10060 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10061 ; SSE-NEXT: movaps %xmm0, 3024(%rax)
10062 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10063 ; SSE-NEXT: movaps %xmm0, 3008(%rax)
10064 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10065 ; SSE-NEXT: movaps %xmm0, 2992(%rax)
10066 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10067 ; SSE-NEXT: movaps %xmm0, 2976(%rax)
10068 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10069 ; SSE-NEXT: movaps %xmm0, 2960(%rax)
10070 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10071 ; SSE-NEXT: movaps %xmm0, 2944(%rax)
10072 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10073 ; SSE-NEXT: movaps %xmm0, 2928(%rax)
10074 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10075 ; SSE-NEXT: movaps %xmm0, 2912(%rax)
10076 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10077 ; SSE-NEXT: movaps %xmm0, 2896(%rax)
10078 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10079 ; SSE-NEXT: movaps %xmm0, 2880(%rax)
10080 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10081 ; SSE-NEXT: movaps %xmm0, 2864(%rax)
10082 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10083 ; SSE-NEXT: movaps %xmm0, 2848(%rax)
10084 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10085 ; SSE-NEXT: movaps %xmm0, 2832(%rax)
10086 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10087 ; SSE-NEXT: movaps %xmm0, 2816(%rax)
10088 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10089 ; SSE-NEXT: movaps %xmm0, 2800(%rax)
10090 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10091 ; SSE-NEXT: movaps %xmm0, 2784(%rax)
10092 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10093 ; SSE-NEXT: movaps %xmm0, 2768(%rax)
10094 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10095 ; SSE-NEXT: movaps %xmm0, 2752(%rax)
10096 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10097 ; SSE-NEXT: movaps %xmm0, 2736(%rax)
10098 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10099 ; SSE-NEXT: movaps %xmm0, 2720(%rax)
10100 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10101 ; SSE-NEXT: movaps %xmm0, 2704(%rax)
10102 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10103 ; SSE-NEXT: movaps %xmm0, 2688(%rax)
10104 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10105 ; SSE-NEXT: movaps %xmm0, 2672(%rax)
10106 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10107 ; SSE-NEXT: movaps %xmm0, 2656(%rax)
10108 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10109 ; SSE-NEXT: movaps %xmm0, 2640(%rax)
10110 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10111 ; SSE-NEXT: movaps %xmm0, 2624(%rax)
10112 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10113 ; SSE-NEXT: movaps %xmm0, 2608(%rax)
10114 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10115 ; SSE-NEXT: movaps %xmm0, 2592(%rax)
10116 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10117 ; SSE-NEXT: movaps %xmm0, 2576(%rax)
10118 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10119 ; SSE-NEXT: movaps %xmm0, 2560(%rax)
10120 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10121 ; SSE-NEXT: movaps %xmm0, 2544(%rax)
10122 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10123 ; SSE-NEXT: movaps %xmm0, 2528(%rax)
10124 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10125 ; SSE-NEXT: movaps %xmm0, 2512(%rax)
10126 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10127 ; SSE-NEXT: movaps %xmm0, 2496(%rax)
10128 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10129 ; SSE-NEXT: movaps %xmm0, 2480(%rax)
10130 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10131 ; SSE-NEXT: movaps %xmm0, 2464(%rax)
10132 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10133 ; SSE-NEXT: movaps %xmm0, 2448(%rax)
10134 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10135 ; SSE-NEXT: movaps %xmm0, 2432(%rax)
10136 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10137 ; SSE-NEXT: movaps %xmm0, 2416(%rax)
10138 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10139 ; SSE-NEXT: movaps %xmm0, 2400(%rax)
10140 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10141 ; SSE-NEXT: movaps %xmm0, 2384(%rax)
10142 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10143 ; SSE-NEXT: movaps %xmm0, 2368(%rax)
10144 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10145 ; SSE-NEXT: movaps %xmm0, 2352(%rax)
10146 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10147 ; SSE-NEXT: movaps %xmm0, 2336(%rax)
10148 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10149 ; SSE-NEXT: movaps %xmm0, 2320(%rax)
10150 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10151 ; SSE-NEXT: movaps %xmm0, 2304(%rax)
10152 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10153 ; SSE-NEXT: movaps %xmm0, 2288(%rax)
10154 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10155 ; SSE-NEXT: movaps %xmm0, 2272(%rax)
10156 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10157 ; SSE-NEXT: movaps %xmm0, 2256(%rax)
10158 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10159 ; SSE-NEXT: movaps %xmm0, 2240(%rax)
10160 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10161 ; SSE-NEXT: movaps %xmm0, 2224(%rax)
10162 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10163 ; SSE-NEXT: movaps %xmm0, 2208(%rax)
10164 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10165 ; SSE-NEXT: movaps %xmm0, 2192(%rax)
10166 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10167 ; SSE-NEXT: movaps %xmm0, 2176(%rax)
10168 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10169 ; SSE-NEXT: movaps %xmm0, 2160(%rax)
10170 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10171 ; SSE-NEXT: movaps %xmm0, 2144(%rax)
10172 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10173 ; SSE-NEXT: movaps %xmm0, 2128(%rax)
10174 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10175 ; SSE-NEXT: movaps %xmm0, 2112(%rax)
10176 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10177 ; SSE-NEXT: movaps %xmm0, 2096(%rax)
10178 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10179 ; SSE-NEXT: movaps %xmm0, 2080(%rax)
10180 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10181 ; SSE-NEXT: movaps %xmm0, 2064(%rax)
10182 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10183 ; SSE-NEXT: movaps %xmm0, 2048(%rax)
10184 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10185 ; SSE-NEXT: movaps %xmm0, 2032(%rax)
10186 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10187 ; SSE-NEXT: movaps %xmm0, 2016(%rax)
10188 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10189 ; SSE-NEXT: movaps %xmm0, 2000(%rax)
10190 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10191 ; SSE-NEXT: movaps %xmm0, 1984(%rax)
10192 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10193 ; SSE-NEXT: movaps %xmm0, 1968(%rax)
10194 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10195 ; SSE-NEXT: movaps %xmm0, 1952(%rax)
10196 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10197 ; SSE-NEXT: movaps %xmm0, 1936(%rax)
10198 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10199 ; SSE-NEXT: movaps %xmm0, 1920(%rax)
10200 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10201 ; SSE-NEXT: movaps %xmm0, 1904(%rax)
10202 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10203 ; SSE-NEXT: movaps %xmm0, 1888(%rax)
10204 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10205 ; SSE-NEXT: movaps %xmm0, 1872(%rax)
10206 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10207 ; SSE-NEXT: movaps %xmm0, 1856(%rax)
10208 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10209 ; SSE-NEXT: movaps %xmm0, 1840(%rax)
10210 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10211 ; SSE-NEXT: movaps %xmm0, 1824(%rax)
10212 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10213 ; SSE-NEXT: movaps %xmm0, 1808(%rax)
10214 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10215 ; SSE-NEXT: movaps %xmm0, 1792(%rax)
10216 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10217 ; SSE-NEXT: movaps %xmm0, 1776(%rax)
10218 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10219 ; SSE-NEXT: movaps %xmm0, 1760(%rax)
10220 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10221 ; SSE-NEXT: movaps %xmm0, 1744(%rax)
10222 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10223 ; SSE-NEXT: movaps %xmm0, 1728(%rax)
10224 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10225 ; SSE-NEXT: movaps %xmm0, 1712(%rax)
10226 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10227 ; SSE-NEXT: movaps %xmm0, 1696(%rax)
10228 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10229 ; SSE-NEXT: movaps %xmm0, 1680(%rax)
10230 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10231 ; SSE-NEXT: movaps %xmm0, 1664(%rax)
10232 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10233 ; SSE-NEXT: movaps %xmm0, 1648(%rax)
10234 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10235 ; SSE-NEXT: movaps %xmm0, 1632(%rax)
10236 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10237 ; SSE-NEXT: movaps %xmm0, 1616(%rax)
10238 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10239 ; SSE-NEXT: movaps %xmm0, 1600(%rax)
10240 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10241 ; SSE-NEXT: movaps %xmm0, 1584(%rax)
10242 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10243 ; SSE-NEXT: movaps %xmm0, 1568(%rax)
10244 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10245 ; SSE-NEXT: movaps %xmm0, 1552(%rax)
10246 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10247 ; SSE-NEXT: movaps %xmm0, 1536(%rax)
10248 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10249 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
10250 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10251 ; SSE-NEXT: movaps %xmm0, 1504(%rax)
10252 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10253 ; SSE-NEXT: movaps %xmm0, 1488(%rax)
10254 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10255 ; SSE-NEXT: movaps %xmm0, 1472(%rax)
10256 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10257 ; SSE-NEXT: movaps %xmm0, 1456(%rax)
10258 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10259 ; SSE-NEXT: movaps %xmm0, 1440(%rax)
10260 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10261 ; SSE-NEXT: movaps %xmm0, 1424(%rax)
10262 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10263 ; SSE-NEXT: movaps %xmm0, 1408(%rax)
10264 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10265 ; SSE-NEXT: movaps %xmm0, 1392(%rax)
10266 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10267 ; SSE-NEXT: movaps %xmm0, 1376(%rax)
10268 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10269 ; SSE-NEXT: movaps %xmm0, 1360(%rax)
10270 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10271 ; SSE-NEXT: movaps %xmm0, 1344(%rax)
10272 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10273 ; SSE-NEXT: movaps %xmm0, 1328(%rax)
10274 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10275 ; SSE-NEXT: movaps %xmm0, 1312(%rax)
10276 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10277 ; SSE-NEXT: movaps %xmm0, 1296(%rax)
10278 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10279 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
10280 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10281 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
10282 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10283 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
10284 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10285 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
10286 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10287 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
10288 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10289 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
10290 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10291 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
10292 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10293 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
10294 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10295 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
10296 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10297 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
10298 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10299 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
10300 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10301 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
10302 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10303 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
10304 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10305 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
10306 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10307 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
10308 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10309 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
10310 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10311 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
10312 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10313 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
10314 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10315 ; SSE-NEXT: movaps %xmm0, 992(%rax)
10316 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10317 ; SSE-NEXT: movaps %xmm0, 976(%rax)
10318 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10319 ; SSE-NEXT: movaps %xmm0, 960(%rax)
10320 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10321 ; SSE-NEXT: movaps %xmm0, 944(%rax)
10322 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10323 ; SSE-NEXT: movaps %xmm0, 928(%rax)
10324 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10325 ; SSE-NEXT: movaps %xmm0, 912(%rax)
10326 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10327 ; SSE-NEXT: movaps %xmm0, 896(%rax)
10328 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10329 ; SSE-NEXT: movaps %xmm0, 880(%rax)
10330 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10331 ; SSE-NEXT: movaps %xmm0, 864(%rax)
10332 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10333 ; SSE-NEXT: movaps %xmm0, 848(%rax)
10334 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10335 ; SSE-NEXT: movaps %xmm0, 832(%rax)
10336 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10337 ; SSE-NEXT: movaps %xmm0, 816(%rax)
10338 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10339 ; SSE-NEXT: movaps %xmm0, 800(%rax)
10340 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10341 ; SSE-NEXT: movaps %xmm0, 784(%rax)
10342 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10343 ; SSE-NEXT: movaps %xmm0, 768(%rax)
10344 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10345 ; SSE-NEXT: movaps %xmm0, 752(%rax)
10346 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10347 ; SSE-NEXT: movaps %xmm0, 736(%rax)
10348 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10349 ; SSE-NEXT: movaps %xmm0, 720(%rax)
10350 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10351 ; SSE-NEXT: movaps %xmm0, 704(%rax)
10352 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10353 ; SSE-NEXT: movaps %xmm0, 688(%rax)
10354 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10355 ; SSE-NEXT: movaps %xmm0, 672(%rax)
10356 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10357 ; SSE-NEXT: movaps %xmm0, 656(%rax)
10358 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10359 ; SSE-NEXT: movaps %xmm0, 640(%rax)
10360 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10361 ; SSE-NEXT: movaps %xmm0, 624(%rax)
10362 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10363 ; SSE-NEXT: movaps %xmm0, 608(%rax)
10364 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10365 ; SSE-NEXT: movaps %xmm0, 592(%rax)
10366 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10367 ; SSE-NEXT: movaps %xmm0, 576(%rax)
10368 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10369 ; SSE-NEXT: movaps %xmm0, 560(%rax)
10370 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10371 ; SSE-NEXT: movaps %xmm0, 544(%rax)
10372 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10373 ; SSE-NEXT: movaps %xmm0, 528(%rax)
10374 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10375 ; SSE-NEXT: movaps %xmm0, 512(%rax)
10376 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10377 ; SSE-NEXT: movaps %xmm0, 496(%rax)
10378 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10379 ; SSE-NEXT: movaps %xmm0, 480(%rax)
10380 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10381 ; SSE-NEXT: movaps %xmm0, 464(%rax)
10382 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10383 ; SSE-NEXT: movaps %xmm0, 448(%rax)
10384 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10385 ; SSE-NEXT: movaps %xmm0, 432(%rax)
10386 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10387 ; SSE-NEXT: movaps %xmm0, 416(%rax)
10388 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10389 ; SSE-NEXT: movaps %xmm0, 400(%rax)
10390 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10391 ; SSE-NEXT: movaps %xmm0, 384(%rax)
10392 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10393 ; SSE-NEXT: movaps %xmm0, 368(%rax)
10394 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10395 ; SSE-NEXT: movaps %xmm0, 352(%rax)
10396 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10397 ; SSE-NEXT: movaps %xmm0, 336(%rax)
10398 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10399 ; SSE-NEXT: movaps %xmm0, 320(%rax)
10400 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10401 ; SSE-NEXT: movaps %xmm0, 304(%rax)
10402 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10403 ; SSE-NEXT: movaps %xmm0, 288(%rax)
10404 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10405 ; SSE-NEXT: movaps %xmm0, 272(%rax)
10406 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10407 ; SSE-NEXT: movaps %xmm0, 256(%rax)
10408 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10409 ; SSE-NEXT: movaps %xmm0, 240(%rax)
10410 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10411 ; SSE-NEXT: movaps %xmm0, 224(%rax)
10412 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10413 ; SSE-NEXT: movaps %xmm0, 208(%rax)
10414 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10415 ; SSE-NEXT: movaps %xmm0, 192(%rax)
10416 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10417 ; SSE-NEXT: movaps %xmm0, 176(%rax)
10418 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10419 ; SSE-NEXT: movaps %xmm0, 160(%rax)
10420 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10421 ; SSE-NEXT: movaps %xmm0, 144(%rax)
10422 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10423 ; SSE-NEXT: movaps %xmm0, 128(%rax)
10424 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10425 ; SSE-NEXT: movaps %xmm0, 112(%rax)
10426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10427 ; SSE-NEXT: movaps %xmm0, 96(%rax)
10428 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10429 ; SSE-NEXT: movaps %xmm0, 80(%rax)
10430 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10431 ; SSE-NEXT: movaps %xmm0, 64(%rax)
10432 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10433 ; SSE-NEXT: movaps %xmm0, 48(%rax)
10434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10435 ; SSE-NEXT: movaps %xmm0, 32(%rax)
10436 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10437 ; SSE-NEXT: movaps %xmm0, 16(%rax)
10438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10439 ; SSE-NEXT: movaps %xmm0, (%rax)
10440 ; SSE-NEXT: addq $3224, %rsp # imm = 0xC98
10441 ; SSE-NEXT: retq
10442 ;
10443 ; AVX1-ONLY-LABEL: store_i64_stride7_vf64:
10444 ; AVX1-ONLY: # %bb.0:
10445 ; AVX1-ONLY-NEXT: subq $3816, %rsp # imm = 0xEE8
10446 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
10447 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm4
10448 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10449 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm0
10450 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm5
10451 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10452 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm2
10453 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10454 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm1
10455 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm2[0]
10456 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm3
10457 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10458 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm3
10459 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm6
10460 ; AVX1-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10461 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm2, %ymm2
10462 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
10463 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10464 ; AVX1-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm2
10465 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10466 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm3
10467 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10468 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10469 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10470 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10471 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
10472 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10473 ; AVX1-ONLY-NEXT: vmovaps 16(%rax), %xmm1
10474 ; AVX1-ONLY-NEXT: vmovaps 16(%rdi), %xmm2
10475 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm4[1],ymm2[1],ymm4[3],ymm2[3]
10476 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10477 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10478 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10479 ; AVX1-ONLY-NEXT: vmovaps 16(%rcx), %xmm0
10480 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10481 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
10482 ; AVX1-ONLY-NEXT: vmovaps 16(%r8), %xmm1
10483 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm5[1],ymm1[1],ymm5[3],ymm1[3]
10484 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],mem[6,7]
10485 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
10486 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10487 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm0
10488 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm1
10489 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10490 ; AVX1-ONLY-NEXT: vinsertf128 $1, 32(%rcx), %ymm2, %ymm3
10491 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm4
10492 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10493 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10494 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10495 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm5
10496 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10497 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10498 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm3
10499 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10500 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm2
10501 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10502 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10503 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10504 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10505 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10506 ; AVX1-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm1
10507 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10508 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10509 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10510 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10511 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
10512 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10513 ; AVX1-ONLY-NEXT: vmovaps 48(%rdi), %xmm0
10514 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10515 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10516 ; AVX1-ONLY-NEXT: vmovaps 48(%rax), %xmm1
10517 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10518 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10519 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm1
10520 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10521 ; AVX1-ONLY-NEXT: vmovaps 48(%r8), %xmm0
10522 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10523 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10524 ; AVX1-ONLY-NEXT: vmovaps 48(%rcx), %xmm1
10525 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10526 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10527 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10528 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10529 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm1
10530 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10531 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm0
10532 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
10533 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm2
10534 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10535 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10536 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm3
10537 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10538 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
10539 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10540 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10541 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %ymm1
10542 ; AVX1-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm2
10543 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10544 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm3
10545 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10546 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10547 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10548 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10549 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10550 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10551 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %ymm2
10552 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10553 ; AVX1-ONLY-NEXT: vmovaps 80(%rdi), %xmm0
10554 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
10555 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10556 ; AVX1-ONLY-NEXT: vmovaps 80(%rax), %xmm1
10557 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10558 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10559 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %ymm1
10560 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10561 ; AVX1-ONLY-NEXT: vmovaps 80(%r8), %xmm0
10562 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10563 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10564 ; AVX1-ONLY-NEXT: vmovaps 80(%rcx), %xmm1
10565 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10566 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10567 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10568 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10569 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm0
10570 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm1
10571 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10572 ; AVX1-ONLY-NEXT: vinsertf128 $1, 96(%rcx), %ymm2, %ymm3
10573 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm4
10574 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10575 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10576 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10577 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10578 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm3
10579 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10580 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm2
10581 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10582 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm5
10583 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10584 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10585 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10586 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10587 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10588 ; AVX1-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm1
10589 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10590 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10591 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10592 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10593 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
10594 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10595 ; AVX1-ONLY-NEXT: vmovaps 112(%rdi), %xmm0
10596 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10597 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10598 ; AVX1-ONLY-NEXT: vmovaps 112(%rax), %xmm1
10599 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10600 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10601 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %ymm1
10602 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10603 ; AVX1-ONLY-NEXT: vmovaps 112(%r8), %xmm0
10604 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10605 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10606 ; AVX1-ONLY-NEXT: vmovaps 112(%rcx), %xmm1
10607 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10608 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10609 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10610 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10611 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %xmm1
10612 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10613 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %xmm0
10614 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
10615 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm2
10616 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10617 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10618 ; AVX1-ONLY-NEXT: vmovaps 128(%rax), %xmm3
10619 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10620 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
10621 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10622 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10623 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %ymm1
10624 ; AVX1-ONLY-NEXT: vbroadcastsd 136(%rcx), %ymm2
10625 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10626 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm3
10627 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10628 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10629 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10630 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10631 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10632 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10633 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %ymm2
10634 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10635 ; AVX1-ONLY-NEXT: vmovaps 144(%rdi), %xmm0
10636 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
10637 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10638 ; AVX1-ONLY-NEXT: vmovaps 144(%rax), %xmm1
10639 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10640 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10641 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %ymm1
10642 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10643 ; AVX1-ONLY-NEXT: vmovaps 144(%r8), %xmm0
10644 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10645 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10646 ; AVX1-ONLY-NEXT: vmovaps 144(%rcx), %xmm1
10647 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10648 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10649 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10650 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10651 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm0
10652 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm1
10653 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10654 ; AVX1-ONLY-NEXT: vinsertf128 $1, 160(%rcx), %ymm2, %ymm3
10655 ; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %xmm4
10656 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10657 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10658 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10659 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10660 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %xmm3
10661 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10662 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %xmm2
10663 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10664 ; AVX1-ONLY-NEXT: vmovaps 160(%rax), %xmm5
10665 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10666 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10667 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10668 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10669 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10670 ; AVX1-ONLY-NEXT: vbroadcastsd 168(%rcx), %ymm1
10671 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10672 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10673 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10674 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10675 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
10676 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10677 ; AVX1-ONLY-NEXT: vmovaps 176(%rdi), %xmm0
10678 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10679 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10680 ; AVX1-ONLY-NEXT: vmovaps 176(%rax), %xmm1
10681 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10682 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10683 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %ymm1
10684 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10685 ; AVX1-ONLY-NEXT: vmovaps 176(%r8), %xmm0
10686 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10687 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10688 ; AVX1-ONLY-NEXT: vmovaps 176(%rcx), %xmm1
10689 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10690 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10691 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10692 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10693 ; AVX1-ONLY-NEXT: vmovaps 192(%r9), %xmm1
10694 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10695 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %xmm0
10696 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
10697 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm2
10698 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10699 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10700 ; AVX1-ONLY-NEXT: vmovaps 192(%rax), %xmm3
10701 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10702 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
10703 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10704 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10705 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %ymm1
10706 ; AVX1-ONLY-NEXT: vbroadcastsd 200(%rcx), %ymm2
10707 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10708 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm3
10709 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10710 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10711 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10712 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10713 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10714 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10715 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %ymm2
10716 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10717 ; AVX1-ONLY-NEXT: vmovaps 208(%rdi), %xmm0
10718 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
10719 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10720 ; AVX1-ONLY-NEXT: vmovaps 208(%rax), %xmm1
10721 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10722 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10723 ; AVX1-ONLY-NEXT: vmovaps 192(%r9), %ymm1
10724 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10725 ; AVX1-ONLY-NEXT: vmovaps 208(%r8), %xmm0
10726 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10727 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10728 ; AVX1-ONLY-NEXT: vmovaps 208(%rcx), %xmm1
10729 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10730 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10731 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10732 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10733 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm0
10734 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm1
10735 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10736 ; AVX1-ONLY-NEXT: vinsertf128 $1, 224(%rcx), %ymm2, %ymm3
10737 ; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %xmm4
10738 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10739 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10740 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10741 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10742 ; AVX1-ONLY-NEXT: vmovaps 224(%r9), %xmm3
10743 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10744 ; AVX1-ONLY-NEXT: vmovaps 224(%r8), %xmm2
10745 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10746 ; AVX1-ONLY-NEXT: vmovaps 224(%rax), %xmm5
10747 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10748 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10749 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10750 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10751 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10752 ; AVX1-ONLY-NEXT: vbroadcastsd 232(%rcx), %ymm1
10753 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10754 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10755 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10756 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10757 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %ymm1
10758 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10759 ; AVX1-ONLY-NEXT: vmovaps 240(%rdi), %xmm0
10760 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10761 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10762 ; AVX1-ONLY-NEXT: vmovaps 240(%rax), %xmm1
10763 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10764 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10765 ; AVX1-ONLY-NEXT: vmovaps 224(%r9), %ymm1
10766 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10767 ; AVX1-ONLY-NEXT: vmovaps 240(%r8), %xmm0
10768 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10769 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10770 ; AVX1-ONLY-NEXT: vmovaps 240(%rcx), %xmm1
10771 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10772 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10773 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10774 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10775 ; AVX1-ONLY-NEXT: vmovaps 256(%r9), %xmm1
10776 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10777 ; AVX1-ONLY-NEXT: vmovaps 256(%r8), %xmm0
10778 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
10779 ; AVX1-ONLY-NEXT: vmovaps 256(%rdi), %xmm2
10780 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10781 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10782 ; AVX1-ONLY-NEXT: vmovaps 256(%rax), %xmm3
10783 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10784 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
10785 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10786 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10787 ; AVX1-ONLY-NEXT: vmovaps 256(%rdx), %ymm1
10788 ; AVX1-ONLY-NEXT: vbroadcastsd 264(%rcx), %ymm2
10789 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10790 ; AVX1-ONLY-NEXT: vmovaps 256(%rsi), %xmm3
10791 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10792 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10793 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10794 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10795 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10796 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10797 ; AVX1-ONLY-NEXT: vmovaps 256(%rsi), %ymm2
10798 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10799 ; AVX1-ONLY-NEXT: vmovaps 272(%rdi), %xmm0
10800 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
10801 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10802 ; AVX1-ONLY-NEXT: vmovaps 272(%rax), %xmm1
10803 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10804 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10805 ; AVX1-ONLY-NEXT: vmovaps 256(%r9), %ymm1
10806 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10807 ; AVX1-ONLY-NEXT: vmovaps 272(%r8), %xmm0
10808 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10809 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10810 ; AVX1-ONLY-NEXT: vmovaps 272(%rcx), %xmm1
10811 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10812 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10813 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10814 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10815 ; AVX1-ONLY-NEXT: vmovaps 288(%rsi), %xmm0
10816 ; AVX1-ONLY-NEXT: vmovaps 288(%rdi), %xmm1
10817 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10818 ; AVX1-ONLY-NEXT: vinsertf128 $1, 288(%rcx), %ymm2, %ymm3
10819 ; AVX1-ONLY-NEXT: vmovaps 288(%rdx), %xmm4
10820 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10821 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10822 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10823 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10824 ; AVX1-ONLY-NEXT: vmovaps 288(%r9), %xmm3
10825 ; AVX1-ONLY-NEXT: vmovaps %xmm3, (%rsp) # 16-byte Spill
10826 ; AVX1-ONLY-NEXT: vmovaps 288(%r8), %xmm2
10827 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10828 ; AVX1-ONLY-NEXT: vmovaps 288(%rax), %xmm5
10829 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10830 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10831 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10832 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10833 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10834 ; AVX1-ONLY-NEXT: vbroadcastsd 296(%rcx), %ymm1
10835 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10836 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10837 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10838 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10839 ; AVX1-ONLY-NEXT: vmovaps 288(%rsi), %ymm1
10840 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10841 ; AVX1-ONLY-NEXT: vmovaps 304(%rdi), %xmm0
10842 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10843 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10844 ; AVX1-ONLY-NEXT: vmovaps 304(%rax), %xmm1
10845 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10846 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10847 ; AVX1-ONLY-NEXT: vmovaps 288(%r9), %ymm1
10848 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10849 ; AVX1-ONLY-NEXT: vmovaps 304(%r8), %xmm0
10850 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10851 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10852 ; AVX1-ONLY-NEXT: vmovaps 304(%rcx), %xmm1
10853 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10854 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10855 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10856 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10857 ; AVX1-ONLY-NEXT: vmovaps 320(%r9), %xmm1
10858 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10859 ; AVX1-ONLY-NEXT: vmovaps 320(%r8), %xmm0
10860 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
10861 ; AVX1-ONLY-NEXT: vmovaps 320(%rdi), %xmm2
10862 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10863 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10864 ; AVX1-ONLY-NEXT: vmovaps 320(%rax), %xmm3
10865 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10866 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
10867 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10868 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10869 ; AVX1-ONLY-NEXT: vmovaps 320(%rdx), %ymm1
10870 ; AVX1-ONLY-NEXT: vbroadcastsd 328(%rcx), %ymm2
10871 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10872 ; AVX1-ONLY-NEXT: vmovaps 320(%rsi), %xmm3
10873 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10874 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10875 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10876 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10877 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10878 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10879 ; AVX1-ONLY-NEXT: vmovaps 320(%rsi), %ymm2
10880 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10881 ; AVX1-ONLY-NEXT: vmovaps 336(%rdi), %xmm0
10882 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm2[1],ymm0[1],ymm2[3],ymm0[3]
10883 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10884 ; AVX1-ONLY-NEXT: vmovaps 336(%rax), %xmm1
10885 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10886 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10887 ; AVX1-ONLY-NEXT: vmovaps 320(%r9), %ymm1
10888 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10889 ; AVX1-ONLY-NEXT: vmovaps 336(%r8), %xmm0
10890 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
10891 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10892 ; AVX1-ONLY-NEXT: vmovaps 336(%rcx), %xmm1
10893 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10894 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
10895 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10896 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10897 ; AVX1-ONLY-NEXT: vmovaps 352(%rsi), %xmm0
10898 ; AVX1-ONLY-NEXT: vmovaps 352(%rdi), %xmm1
10899 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
10900 ; AVX1-ONLY-NEXT: vinsertf128 $1, 352(%rcx), %ymm2, %ymm3
10901 ; AVX1-ONLY-NEXT: vmovaps 352(%rdx), %xmm4
10902 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
10903 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[2]
10904 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10905 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
10906 ; AVX1-ONLY-NEXT: vmovaps 352(%r9), %xmm3
10907 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10908 ; AVX1-ONLY-NEXT: vmovaps 352(%r8), %xmm2
10909 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm2[0],xmm3[0]
10910 ; AVX1-ONLY-NEXT: vmovaps 352(%rax), %xmm5
10911 ; AVX1-ONLY-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10912 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
10913 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
10914 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10915 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
10916 ; AVX1-ONLY-NEXT: vbroadcastsd 360(%rcx), %ymm1
10917 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10918 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
10919 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10920 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10921 ; AVX1-ONLY-NEXT: vmovaps 352(%rsi), %ymm15
10922 ; AVX1-ONLY-NEXT: vmovaps 368(%rdi), %xmm0
10923 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm15[1],ymm0[1],ymm15[3],ymm0[3]
10924 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10925 ; AVX1-ONLY-NEXT: vmovaps 368(%rax), %xmm1
10926 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10927 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10928 ; AVX1-ONLY-NEXT: vmovaps 352(%r9), %ymm14
10929 ; AVX1-ONLY-NEXT: vmovaps 368(%r8), %xmm0
10930 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm14[1],ymm0[1],ymm14[3],ymm0[3]
10931 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10932 ; AVX1-ONLY-NEXT: vmovaps 368(%rcx), %xmm12
10933 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm12[2,3,2,3]
10934 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10935 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10936 ; AVX1-ONLY-NEXT: vmovaps 384(%r9), %xmm13
10937 ; AVX1-ONLY-NEXT: vmovaps 384(%r8), %xmm0
10938 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm13[0]
10939 ; AVX1-ONLY-NEXT: vmovaps 384(%rdi), %xmm2
10940 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10941 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
10942 ; AVX1-ONLY-NEXT: vmovaps 384(%rax), %xmm11
10943 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm1, %ymm1
10944 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
10945 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10946 ; AVX1-ONLY-NEXT: vmovaps 384(%rdx), %ymm1
10947 ; AVX1-ONLY-NEXT: vbroadcastsd 392(%rcx), %ymm2
10948 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10949 ; AVX1-ONLY-NEXT: vmovaps 384(%rsi), %xmm3
10950 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10951 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
10952 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
10953 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10954 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
10955 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10956 ; AVX1-ONLY-NEXT: vmovaps 384(%rsi), %ymm10
10957 ; AVX1-ONLY-NEXT: vmovaps 400(%rdi), %xmm0
10958 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm10[1],ymm0[1],ymm10[3],ymm0[3]
10959 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10960 ; AVX1-ONLY-NEXT: vmovaps 400(%rax), %xmm1
10961 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10962 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10963 ; AVX1-ONLY-NEXT: vmovaps 384(%r9), %ymm9
10964 ; AVX1-ONLY-NEXT: vmovaps 400(%r8), %xmm0
10965 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm9[1],ymm0[1],ymm9[3],ymm0[3]
10966 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
10967 ; AVX1-ONLY-NEXT: vmovaps 400(%rcx), %xmm8
10968 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm8[2,3,2,3]
10969 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
10970 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10971 ; AVX1-ONLY-NEXT: vmovaps 416(%rsi), %xmm5
10972 ; AVX1-ONLY-NEXT: vmovaps 416(%rdi), %xmm0
10973 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm5[0]
10974 ; AVX1-ONLY-NEXT: vinsertf128 $1, 416(%rcx), %ymm1, %ymm6
10975 ; AVX1-ONLY-NEXT: vmovaps 416(%rdx), %xmm2
10976 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
10977 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm6[1],ymm1[2],ymm6[2]
10978 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10979 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm6
10980 ; AVX1-ONLY-NEXT: vmovapd 416(%r9), %xmm7
10981 ; AVX1-ONLY-NEXT: vmovapd 416(%r8), %xmm0
10982 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],xmm7[0]
10983 ; AVX1-ONLY-NEXT: vmovapd 416(%rax), %xmm4
10984 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
10985 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm6[6,7]
10986 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10987 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm5[1],xmm2[1]
10988 ; AVX1-ONLY-NEXT: vbroadcastsd 424(%rcx), %ymm2
10989 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
10990 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
10991 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
10992 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10993 ; AVX1-ONLY-NEXT: vmovaps 448(%rsi), %xmm0
10994 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10995 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
10996 ; AVX1-ONLY-NEXT: vbroadcastsd 456(%rcx), %ymm1
10997 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
10998 ; AVX1-ONLY-NEXT: vmovapd 448(%rdx), %ymm5
10999 ; AVX1-ONLY-NEXT: vinsertf128 $1, 448(%r8), %ymm5, %ymm1
11000 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
11001 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11002 ; AVX1-ONLY-NEXT: vmovaps 480(%rsi), %xmm2
11003 ; AVX1-ONLY-NEXT: vmovaps 480(%rdi), %xmm6
11004 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm6[0],xmm2[0]
11005 ; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%rcx), %ymm1, %ymm3
11006 ; AVX1-ONLY-NEXT: vmovaps 480(%rdx), %xmm0
11007 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
11008 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2],ymm3[2]
11009 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11010 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm2[2,3,2,3]
11011 ; AVX1-ONLY-NEXT: vbroadcastsd 488(%rcx), %ymm2
11012 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
11013 ; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%r8), %ymm0, %ymm0
11014 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
11015 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11016 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm0
11017 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11018 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11019 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11020 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11021 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11022 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11023 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11024 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm0
11025 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11026 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11027 ; AVX1-ONLY-NEXT: vmovaps 16(%rdx), %xmm1
11028 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11029 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11030 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11031 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11032 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
11033 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11034 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11035 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11036 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11037 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11038 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11039 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11040 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm0
11041 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11042 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11043 ; AVX1-ONLY-NEXT: vmovaps 48(%rdx), %xmm1
11044 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11045 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11046 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11047 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11048 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %ymm0
11049 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11050 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11051 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11052 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11053 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11054 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11055 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11056 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %ymm0
11057 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11058 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11059 ; AVX1-ONLY-NEXT: vmovaps 80(%rdx), %xmm1
11060 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11061 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11062 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11063 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11064 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
11065 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11066 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11067 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11068 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11069 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11070 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11071 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11072 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %ymm0
11073 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11074 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11075 ; AVX1-ONLY-NEXT: vmovaps 112(%rdx), %xmm1
11076 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11077 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11078 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11079 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11080 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
11081 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11082 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11083 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11084 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11085 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11086 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11087 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11088 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %ymm0
11089 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11090 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11091 ; AVX1-ONLY-NEXT: vmovaps 144(%rdx), %xmm1
11092 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11093 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11094 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11095 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11096 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
11097 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11098 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11099 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11100 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11101 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11102 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11103 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11104 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %ymm0
11105 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11106 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11107 ; AVX1-ONLY-NEXT: vmovaps 176(%rdx), %xmm1
11108 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11109 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11110 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11111 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11112 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %ymm0
11113 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11114 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11115 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11116 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11117 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11118 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11119 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11120 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %ymm0
11121 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11122 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11123 ; AVX1-ONLY-NEXT: vmovaps 208(%rdx), %xmm1
11124 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11125 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11126 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11127 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11128 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
11129 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11130 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11131 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11132 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11133 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11134 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11135 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11136 ; AVX1-ONLY-NEXT: vmovaps 224(%r8), %ymm0
11137 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11138 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11139 ; AVX1-ONLY-NEXT: vmovaps 240(%rdx), %xmm1
11140 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11141 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11142 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11143 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11144 ; AVX1-ONLY-NEXT: vmovaps 256(%rdi), %ymm0
11145 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11146 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11147 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11148 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11149 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11150 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11151 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11152 ; AVX1-ONLY-NEXT: vmovaps 256(%r8), %ymm0
11153 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11154 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11155 ; AVX1-ONLY-NEXT: vmovaps 272(%rdx), %xmm1
11156 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11157 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11158 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11159 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11160 ; AVX1-ONLY-NEXT: vmovaps 288(%rdi), %ymm0
11161 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11162 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11163 ; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm1 # 16-byte Reload
11164 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11165 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11166 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11167 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11168 ; AVX1-ONLY-NEXT: vmovaps 288(%r8), %ymm0
11169 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11170 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11171 ; AVX1-ONLY-NEXT: vmovaps 304(%rdx), %xmm1
11172 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11173 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11174 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11175 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11176 ; AVX1-ONLY-NEXT: vmovaps 320(%rdi), %ymm0
11177 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11178 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11179 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11180 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11181 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11182 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11183 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11184 ; AVX1-ONLY-NEXT: vmovaps 320(%r8), %ymm0
11185 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11186 ; AVX1-ONLY-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11187 ; AVX1-ONLY-NEXT: vmovaps 336(%rdx), %xmm1
11188 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11189 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0]
11190 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11191 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11192 ; AVX1-ONLY-NEXT: vmovaps 352(%rdi), %ymm0
11193 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm15[0],ymm0[2],ymm15[2]
11194 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11195 ; AVX1-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11196 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[1],mem[1]
11197 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11198 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11199 ; AVX1-ONLY-NEXT: vmovaps 352(%r8), %ymm0
11200 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm14[0],ymm0[2],ymm14[2]
11201 ; AVX1-ONLY-NEXT: vmovaps 368(%rdx), %xmm1
11202 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm12[0]
11203 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11204 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11205 ; AVX1-ONLY-NEXT: vmovaps 384(%rdi), %ymm0
11206 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm10[0],ymm0[2],ymm10[2]
11207 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm13[1],xmm11[1]
11208 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11209 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11210 ; AVX1-ONLY-NEXT: vmovaps 384(%r8), %ymm0
11211 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm9[0],ymm0[2],ymm9[2]
11212 ; AVX1-ONLY-NEXT: vmovaps 400(%rdx), %xmm1
11213 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm8[0]
11214 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11215 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11216 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm7[1],xmm4[1]
11217 ; AVX1-ONLY-NEXT: vmovapd 416(%rdi), %ymm1
11218 ; AVX1-ONLY-NEXT: vmovapd 416(%rsi), %ymm2
11219 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11220 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
11221 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11222 ; AVX1-ONLY-NEXT: vmovapd 416(%r8), %ymm0
11223 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
11224 ; AVX1-ONLY-NEXT: vmovapd 432(%rcx), %xmm1
11225 ; AVX1-ONLY-NEXT: vmovapd 432(%rdx), %xmm3
11226 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],xmm1[0]
11227 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm3[0,1],ymm0[2,3]
11228 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11229 ; AVX1-ONLY-NEXT: vmovapd 432(%rdi), %xmm0
11230 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],mem[2,3]
11231 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm3 = ymm2[0,0,3,2]
11232 ; AVX1-ONLY-NEXT: vmovapd 416(%rax), %ymm2
11233 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm2[2,3],ymm3[2,3]
11234 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2],ymm0[3]
11235 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11236 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11237 ; AVX1-ONLY-NEXT: vmovaps 448(%rdi), %xmm4
11238 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm3
11239 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm0[2,3],ymm3[4,5,6,7]
11240 ; AVX1-ONLY-NEXT: vmovapd 448(%r8), %ymm3
11241 ; AVX1-ONLY-NEXT: vinsertf128 $1, 448(%rax), %ymm3, %ymm7
11242 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1],ymm0[2,3],ymm7[4,5],ymm0[6,7]
11243 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11244 ; AVX1-ONLY-NEXT: vmovapd 448(%rdi), %ymm0
11245 ; AVX1-ONLY-NEXT: vmovapd 448(%rsi), %ymm7
11246 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
11247 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm8 = mem[0,0]
11248 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm8[0,1],ymm0[2,3]
11249 ; AVX1-ONLY-NEXT: vmovapd 464(%rdi), %xmm8
11250 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3]
11251 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm7 = ymm7[0,0,3,2]
11252 ; AVX1-ONLY-NEXT: vmovapd 448(%rax), %ymm10
11253 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm7 = ymm10[2,3],ymm7[2,3]
11254 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm5 = ymm7[0],ymm5[1],ymm7[2],ymm5[3]
11255 ; AVX1-ONLY-NEXT: vmovupd %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11256 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm5
11257 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
11258 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1],ymm6[2,3],ymm5[4,5,6,7]
11259 ; AVX1-ONLY-NEXT: vmovapd 480(%r8), %ymm13
11260 ; AVX1-ONLY-NEXT: vinsertf128 $1, 480(%rax), %ymm13, %ymm6
11261 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
11262 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11263 ; AVX1-ONLY-NEXT: vmovapd 480(%rdi), %ymm5
11264 ; AVX1-ONLY-NEXT: vmovapd 480(%rsi), %ymm6
11265 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[2],ymm6[2]
11266 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
11267 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3]
11268 ; AVX1-ONLY-NEXT: vmovapd 496(%rdi), %xmm7
11269 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm7 = ymm7[0,1],mem[2,3]
11270 ; AVX1-ONLY-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[0,0,3,2]
11271 ; AVX1-ONLY-NEXT: vmovapd 480(%rax), %ymm15
11272 ; AVX1-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm15[2,3],ymm6[2,3]
11273 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[3]
11274 ; AVX1-ONLY-NEXT: vmovupd %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11275 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],mem[1]
11276 ; AVX1-ONLY-NEXT: vbroadcastsd 440(%r9), %ymm6
11277 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1],ymm6[2,3]
11278 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3]
11279 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11280 ; AVX1-ONLY-NEXT: vmovapd 464(%rcx), %xmm1
11281 ; AVX1-ONLY-NEXT: vmovapd 464(%rdx), %xmm2
11282 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],xmm1[0]
11283 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3]
11284 ; AVX1-ONLY-NEXT: vbroadcastsd 464(%r9), %ymm3
11285 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3]
11286 ; AVX1-ONLY-NEXT: vmovupd %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11287 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],mem[1]
11288 ; AVX1-ONLY-NEXT: vbroadcastsd 472(%r9), %ymm2
11289 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3]
11290 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm10[1],ymm0[2,3]
11291 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11292 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0,1,2],ymm10[3]
11293 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11294 ; AVX1-ONLY-NEXT: vmovapd 496(%rcx), %xmm0
11295 ; AVX1-ONLY-NEXT: vmovapd 496(%rdx), %xmm1
11296 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
11297 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1],ymm13[2,3]
11298 ; AVX1-ONLY-NEXT: vbroadcastsd 496(%r9), %ymm2
11299 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3]
11300 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11301 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],mem[1]
11302 ; AVX1-ONLY-NEXT: vbroadcastsd 504(%r9), %ymm1
11303 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3]
11304 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm1 = ymm5[0],ymm15[1],ymm5[2,3]
11305 ; AVX1-ONLY-NEXT: vmovupd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11306 ; AVX1-ONLY-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1,2],ymm15[3]
11307 ; AVX1-ONLY-NEXT: vmovupd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11308 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11309 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
11310 ; AVX1-ONLY-NEXT: # xmm7 = xmm0[0],mem[0]
11311 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11312 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
11313 ; AVX1-ONLY-NEXT: # xmm6 = xmm0[0],mem[0]
11314 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11315 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
11316 ; AVX1-ONLY-NEXT: # xmm5 = xmm0[0],mem[0]
11317 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11318 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
11319 ; AVX1-ONLY-NEXT: # xmm3 = xmm0[0],mem[0]
11320 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11321 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
11322 ; AVX1-ONLY-NEXT: # xmm2 = xmm0[0],mem[0]
11323 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
11324 ; AVX1-ONLY-NEXT: # xmm4 = xmm4[0],mem[0]
11325 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11326 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
11327 ; AVX1-ONLY-NEXT: # xmm1 = xmm0[0],mem[0]
11328 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11329 ; AVX1-ONLY-NEXT: vunpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11330 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[0],mem[0]
11331 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
11332 ; AVX1-ONLY-NEXT: vmovaps 256(%rdx), %xmm14
11333 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm14 = xmm14[0],mem[0]
11334 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %xmm10
11335 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm10 = xmm10[0],mem[0]
11336 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm12
11337 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm12 = xmm12[0],mem[0]
11338 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %xmm13
11339 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm13 = xmm13[0],mem[0]
11340 ; AVX1-ONLY-NEXT: vmovaps 320(%rdx), %xmm15
11341 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm15 = xmm15[0],mem[0]
11342 ; AVX1-ONLY-NEXT: vmovaps 448(%rdx), %xmm11
11343 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm11 = xmm11[0],mem[0]
11344 ; AVX1-ONLY-NEXT: vmovaps 384(%rdx), %xmm9
11345 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm9 = xmm9[0],mem[0]
11346 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm8
11347 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} xmm8 = xmm8[0],mem[0]
11348 ; AVX1-ONLY-NEXT: vmovaps %xmm8, 16(%rax)
11349 ; AVX1-ONLY-NEXT: vmovaps %xmm0, (%rax)
11350 ; AVX1-ONLY-NEXT: vmovaps %xmm9, 2704(%rax)
11351 ; AVX1-ONLY-NEXT: vmovaps %xmm1, 2688(%rax)
11352 ; AVX1-ONLY-NEXT: vmovaps %xmm11, 3152(%rax)
11353 ; AVX1-ONLY-NEXT: vmovaps %xmm4, 3136(%rax)
11354 ; AVX1-ONLY-NEXT: vmovaps %xmm15, 2256(%rax)
11355 ; AVX1-ONLY-NEXT: vmovaps %xmm2, 2240(%rax)
11356 ; AVX1-ONLY-NEXT: vmovaps %xmm13, 1360(%rax)
11357 ; AVX1-ONLY-NEXT: vmovaps %xmm3, 1344(%rax)
11358 ; AVX1-ONLY-NEXT: vmovaps %xmm12, 464(%rax)
11359 ; AVX1-ONLY-NEXT: vmovaps %xmm5, 448(%rax)
11360 ; AVX1-ONLY-NEXT: vmovaps %xmm10, 912(%rax)
11361 ; AVX1-ONLY-NEXT: vmovaps %xmm6, 896(%rax)
11362 ; AVX1-ONLY-NEXT: vmovaps %xmm14, 1808(%rax)
11363 ; AVX1-ONLY-NEXT: vmovaps %xmm7, 1792(%rax)
11364 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11365 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3520(%rax)
11366 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11367 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3456(%rax)
11368 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11369 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3392(%rax)
11370 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11371 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3296(%rax)
11372 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11373 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3232(%rax)
11374 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11375 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3168(%rax)
11376 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11377 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3072(%rax)
11378 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11379 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3040(%rax)
11380 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11381 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3008(%rax)
11382 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11383 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2816(%rax)
11384 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11385 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2784(%rax)
11386 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11387 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2592(%rax)
11388 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11389 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2560(%rax)
11390 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11391 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2368(%rax)
11392 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11393 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2336(%rax)
11394 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11395 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2144(%rax)
11396 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11397 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2112(%rax)
11398 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11399 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1920(%rax)
11400 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11401 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1888(%rax)
11402 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11403 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1696(%rax)
11404 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11405 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1664(%rax)
11406 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11407 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
11408 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11409 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
11410 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11411 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
11412 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11413 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
11414 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11415 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
11416 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11417 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
11418 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11419 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
11420 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11421 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
11422 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11423 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
11424 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11425 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
11426 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11427 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
11428 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11429 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
11430 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11431 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
11432 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11433 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
11434 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11435 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3552(%rax)
11436 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11437 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3488(%rax)
11438 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11439 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3424(%rax)
11440 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11441 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3360(%rax)
11442 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11443 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3328(%rax)
11444 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11445 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3264(%rax)
11446 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11447 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3200(%rax)
11448 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11449 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 3104(%rax)
11450 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11451 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2976(%rax)
11452 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11453 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2944(%rax)
11454 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11455 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2912(%rax)
11456 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11457 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2880(%rax)
11458 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11459 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2848(%rax)
11460 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11461 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2752(%rax)
11462 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11463 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2720(%rax)
11464 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11465 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2656(%rax)
11466 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11467 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2624(%rax)
11468 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11469 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2528(%rax)
11470 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11471 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2496(%rax)
11472 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11473 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2464(%rax)
11474 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11475 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2432(%rax)
11476 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11477 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2400(%rax)
11478 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11479 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2304(%rax)
11480 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11481 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2272(%rax)
11482 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11483 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2208(%rax)
11484 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11485 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2176(%rax)
11486 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11487 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2080(%rax)
11488 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11489 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2048(%rax)
11490 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11491 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2016(%rax)
11492 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11493 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1984(%rax)
11494 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11495 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1952(%rax)
11496 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11497 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1856(%rax)
11498 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11499 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1824(%rax)
11500 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11501 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1760(%rax)
11502 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11503 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1728(%rax)
11504 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11505 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1632(%rax)
11506 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11507 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1600(%rax)
11508 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11509 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1568(%rax)
11510 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11511 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1536(%rax)
11512 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11513 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
11514 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11515 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
11516 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11517 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
11518 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11519 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rax)
11520 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11521 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
11522 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11523 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
11524 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11525 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1152(%rax)
11526 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11527 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rax)
11528 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11529 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
11530 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11531 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
11532 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11533 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
11534 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11535 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
11536 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11537 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
11538 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11539 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
11540 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11541 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
11542 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11543 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
11544 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11545 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
11546 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11547 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
11548 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11549 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
11550 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11551 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
11552 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11553 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
11554 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11555 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
11556 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11557 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
11558 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11559 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
11560 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11561 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
11562 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11563 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
11564 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11565 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
11566 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11567 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
11568 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11569 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
11570 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11571 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
11572 ; AVX1-ONLY-NEXT: addq $3816, %rsp # imm = 0xEE8
11573 ; AVX1-ONLY-NEXT: vzeroupper
11574 ; AVX1-ONLY-NEXT: retq
11576 ; AVX2-ONLY-LABEL: store_i64_stride7_vf64:
11577 ; AVX2-ONLY: # %bb.0:
11578 ; AVX2-ONLY-NEXT: subq $3880, %rsp # imm = 0xF28
11579 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
11580 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm0
11581 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm1
11582 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm2
11583 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm5
11584 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11585 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm6
11586 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11587 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm3
11588 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rcx), %ymm3, %ymm3
11589 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm4
11590 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11591 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm7
11592 ; AVX2-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11593 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%rdx), %ymm4, %ymm4
11594 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11595 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11596 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
11597 ; AVX2-ONLY-NEXT: vbroadcastsd 8(%rcx), %ymm4
11598 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm2[0,1,2,3],ymm4[4,5,6,7]
11599 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3,4,5,6,7]
11600 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm4
11601 ; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11602 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
11603 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
11604 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11605 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
11606 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm8
11607 ; AVX2-ONLY-NEXT: vmovaps %xmm8, (%rsp) # 16-byte Spill
11608 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
11609 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm4 = xmm4[1],xmm8[1]
11610 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
11611 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11612 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm6[0],ymm5[2],ymm6[2]
11613 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],mem[0],ymm2[2],mem[2]
11614 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11615 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11616 ; AVX2-ONLY-NEXT: vmovaps 16(%rax), %xmm3
11617 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm4
11618 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
11619 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
11620 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
11621 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm0[2,3,4,5,6,7]
11622 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11623 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm0
11624 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm1
11625 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm1[0],mem[0]
11626 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
11627 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
11628 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11629 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm2
11630 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11631 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11632 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
11633 ; AVX2-ONLY-NEXT: vbroadcastsd 40(%rcx), %ymm2
11634 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11635 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
11636 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
11637 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11638 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11639 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
11640 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm1
11641 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm2
11642 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11643 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11644 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11645 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm0
11646 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm4
11647 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11648 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm3
11649 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11650 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11651 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11652 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11653 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11654 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11655 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11656 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11657 ; AVX2-ONLY-NEXT: vmovaps 48(%rax), %xmm1
11658 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11659 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11660 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm0
11661 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rcx), %ymm0, %ymm0
11662 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm1
11663 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11664 ; AVX2-ONLY-NEXT: vinsertf128 $1, 64(%rdx), %ymm1, %ymm1
11665 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11666 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11667 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm0
11668 ; AVX2-ONLY-NEXT: vbroadcastsd 72(%rcx), %ymm1
11669 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11670 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
11671 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
11672 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm2
11673 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11674 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
11675 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
11676 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11677 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm2
11678 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11679 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11680 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11681 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm2
11682 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm3
11683 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
11684 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
11685 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11686 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm4
11687 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11688 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm1
11689 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11690 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
11691 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11692 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
11693 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11694 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
11695 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11696 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11697 ; AVX2-ONLY-NEXT: vmovaps 80(%rax), %xmm1
11698 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11699 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11700 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm0
11701 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
11702 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm2
11703 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
11704 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm3
11705 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11706 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
11707 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
11708 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11709 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm3
11710 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11711 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11712 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
11713 ; AVX2-ONLY-NEXT: vbroadcastsd 104(%rcx), %ymm3
11714 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11715 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
11716 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11717 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11718 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11719 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
11720 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm1
11721 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm2
11722 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11723 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11724 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11725 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm0
11726 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm4
11727 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11728 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm3
11729 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11730 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11731 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11732 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11733 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11734 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11735 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11736 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11737 ; AVX2-ONLY-NEXT: vmovaps 112(%rax), %xmm1
11738 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11739 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11740 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm0
11741 ; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rcx), %ymm0, %ymm0
11742 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm1
11743 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11744 ; AVX2-ONLY-NEXT: vinsertf128 $1, 128(%rdx), %ymm1, %ymm1
11745 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11746 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11747 ; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %ymm0
11748 ; AVX2-ONLY-NEXT: vbroadcastsd 136(%rcx), %ymm1
11749 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11750 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
11751 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
11752 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %xmm2
11753 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11754 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
11755 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
11756 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11757 ; AVX2-ONLY-NEXT: vmovaps 128(%rax), %xmm2
11758 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11759 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11760 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11761 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm2
11762 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm3
11763 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
11764 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
11765 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11766 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm4
11767 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11768 ; AVX2-ONLY-NEXT: vmovaps 128(%r9), %ymm1
11769 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11770 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
11771 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11772 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
11773 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11774 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
11775 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11776 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11777 ; AVX2-ONLY-NEXT: vmovaps 144(%rax), %xmm1
11778 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11779 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11780 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %xmm0
11781 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
11782 ; AVX2-ONLY-NEXT: vmovaps 160(%rax), %xmm2
11783 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
11784 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm3
11785 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11786 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
11787 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
11788 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11789 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %xmm3
11790 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11791 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11792 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
11793 ; AVX2-ONLY-NEXT: vbroadcastsd 168(%rcx), %ymm3
11794 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11795 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
11796 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11797 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11798 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11799 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
11800 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm1
11801 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm2
11802 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11803 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11804 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11805 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %ymm0
11806 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm4
11807 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11808 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %ymm3
11809 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11810 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11811 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11812 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11813 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11814 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11815 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11816 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11817 ; AVX2-ONLY-NEXT: vmovaps 176(%rax), %xmm1
11818 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11819 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11820 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm0
11821 ; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rcx), %ymm0, %ymm0
11822 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm1
11823 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11824 ; AVX2-ONLY-NEXT: vinsertf128 $1, 192(%rdx), %ymm1, %ymm1
11825 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11826 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11827 ; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %ymm0
11828 ; AVX2-ONLY-NEXT: vbroadcastsd 200(%rcx), %ymm1
11829 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11830 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
11831 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
11832 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %xmm2
11833 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11834 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
11835 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
11836 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11837 ; AVX2-ONLY-NEXT: vmovaps 192(%rax), %xmm2
11838 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11839 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11840 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11841 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm2
11842 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm3
11843 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
11844 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
11845 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11846 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm4
11847 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11848 ; AVX2-ONLY-NEXT: vmovaps 192(%r9), %ymm1
11849 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11850 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
11851 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11852 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
11853 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11854 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
11855 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11856 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11857 ; AVX2-ONLY-NEXT: vmovaps 208(%rax), %xmm1
11858 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11859 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11860 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %xmm0
11861 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
11862 ; AVX2-ONLY-NEXT: vmovaps 224(%rax), %xmm2
11863 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
11864 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm3
11865 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11866 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
11867 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
11868 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11869 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %xmm3
11870 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11871 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11872 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
11873 ; AVX2-ONLY-NEXT: vbroadcastsd 232(%rcx), %ymm3
11874 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11875 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
11876 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11877 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11878 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11879 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
11880 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm1
11881 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm2
11882 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11883 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11884 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11885 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %ymm0
11886 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm4
11887 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11888 ; AVX2-ONLY-NEXT: vmovaps 224(%r9), %ymm3
11889 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11890 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11891 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11892 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11893 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11894 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11895 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11896 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11897 ; AVX2-ONLY-NEXT: vmovaps 240(%rax), %xmm1
11898 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11899 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11900 ; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %xmm0
11901 ; AVX2-ONLY-NEXT: vinsertf128 $1, 256(%rcx), %ymm0, %ymm0
11902 ; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %xmm1
11903 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11904 ; AVX2-ONLY-NEXT: vinsertf128 $1, 256(%rdx), %ymm1, %ymm1
11905 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11906 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11907 ; AVX2-ONLY-NEXT: vmovaps 256(%rdx), %ymm0
11908 ; AVX2-ONLY-NEXT: vbroadcastsd 264(%rcx), %ymm1
11909 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11910 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
11911 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
11912 ; AVX2-ONLY-NEXT: vmovaps 256(%r8), %xmm2
11913 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11914 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
11915 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
11916 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11917 ; AVX2-ONLY-NEXT: vmovaps 256(%rax), %xmm2
11918 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11919 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11920 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
11921 ; AVX2-ONLY-NEXT: vmovaps 256(%rdi), %ymm2
11922 ; AVX2-ONLY-NEXT: vmovaps 256(%rsi), %ymm3
11923 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
11924 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
11925 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11926 ; AVX2-ONLY-NEXT: vmovaps 256(%r8), %ymm4
11927 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11928 ; AVX2-ONLY-NEXT: vmovaps 256(%r9), %ymm1
11929 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11930 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
11931 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11932 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
11933 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11934 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
11935 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11936 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11937 ; AVX2-ONLY-NEXT: vmovaps 272(%rax), %xmm1
11938 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11939 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11940 ; AVX2-ONLY-NEXT: vmovaps 288(%r8), %xmm0
11941 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
11942 ; AVX2-ONLY-NEXT: vmovaps 288(%rax), %xmm2
11943 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
11944 ; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %xmm3
11945 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11946 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
11947 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
11948 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11949 ; AVX2-ONLY-NEXT: vmovaps 288(%rdx), %xmm3
11950 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11951 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11952 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
11953 ; AVX2-ONLY-NEXT: vbroadcastsd 296(%rcx), %ymm3
11954 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
11955 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
11956 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11957 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11958 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
11959 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
11960 ; AVX2-ONLY-NEXT: vmovaps 288(%rdi), %ymm1
11961 ; AVX2-ONLY-NEXT: vmovaps 288(%rsi), %ymm2
11962 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
11963 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
11964 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11965 ; AVX2-ONLY-NEXT: vmovaps 288(%rdx), %ymm0
11966 ; AVX2-ONLY-NEXT: vmovaps 288(%r8), %ymm4
11967 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11968 ; AVX2-ONLY-NEXT: vmovaps 288(%r9), %ymm3
11969 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11970 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
11971 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
11972 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
11973 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11974 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
11975 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
11976 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11977 ; AVX2-ONLY-NEXT: vmovaps 304(%rax), %xmm1
11978 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
11979 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11980 ; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %xmm0
11981 ; AVX2-ONLY-NEXT: vinsertf128 $1, 320(%rcx), %ymm0, %ymm0
11982 ; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %xmm1
11983 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11984 ; AVX2-ONLY-NEXT: vinsertf128 $1, 320(%rdx), %ymm1, %ymm1
11985 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
11986 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11987 ; AVX2-ONLY-NEXT: vmovaps 320(%rdx), %ymm0
11988 ; AVX2-ONLY-NEXT: vbroadcastsd 328(%rcx), %ymm1
11989 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11990 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
11991 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
11992 ; AVX2-ONLY-NEXT: vmovaps 320(%r8), %xmm2
11993 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11994 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
11995 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
11996 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11997 ; AVX2-ONLY-NEXT: vmovaps 320(%rax), %xmm10
11998 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
11999 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm10[1]
12000 ; AVX2-ONLY-NEXT: vmovaps 320(%rdi), %ymm2
12001 ; AVX2-ONLY-NEXT: vmovaps 320(%rsi), %ymm3
12002 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
12003 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
12004 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12005 ; AVX2-ONLY-NEXT: vmovaps 320(%r8), %ymm9
12006 ; AVX2-ONLY-NEXT: vmovaps 320(%r9), %ymm1
12007 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12008 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm9[0],ymm1[0],ymm9[2],ymm1[2]
12009 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
12010 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
12011 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12012 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
12013 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
12014 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12015 ; AVX2-ONLY-NEXT: vmovaps 336(%rax), %xmm1
12016 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12017 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12018 ; AVX2-ONLY-NEXT: vmovaps 352(%r8), %xmm0
12019 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
12020 ; AVX2-ONLY-NEXT: vmovaps 352(%rax), %xmm2
12021 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
12022 ; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %xmm3
12023 ; AVX2-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12024 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
12025 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
12026 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12027 ; AVX2-ONLY-NEXT: vmovaps 352(%rdx), %xmm7
12028 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12029 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm7[1]
12030 ; AVX2-ONLY-NEXT: vbroadcastsd 360(%rcx), %ymm3
12031 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
12032 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
12033 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12034 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12035 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
12036 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12037 ; AVX2-ONLY-NEXT: vmovaps 352(%rdi), %ymm1
12038 ; AVX2-ONLY-NEXT: vmovaps 352(%rsi), %ymm2
12039 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
12040 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
12041 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12042 ; AVX2-ONLY-NEXT: vmovaps 352(%rdx), %ymm0
12043 ; AVX2-ONLY-NEXT: vmovaps 352(%r8), %ymm5
12044 ; AVX2-ONLY-NEXT: vmovaps 352(%r9), %ymm3
12045 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12046 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
12047 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
12048 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm4[2,3],ymm3[2,3]
12049 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12050 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12051 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
12052 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12053 ; AVX2-ONLY-NEXT: vmovaps 368(%rax), %xmm1
12054 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12055 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12056 ; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %xmm0
12057 ; AVX2-ONLY-NEXT: vinsertf128 $1, 384(%rcx), %ymm0, %ymm0
12058 ; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %xmm1
12059 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12060 ; AVX2-ONLY-NEXT: vinsertf128 $1, 384(%rdx), %ymm1, %ymm1
12061 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12062 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12063 ; AVX2-ONLY-NEXT: vmovaps 384(%rdx), %ymm0
12064 ; AVX2-ONLY-NEXT: vbroadcastsd 392(%rcx), %ymm1
12065 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12066 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = mem[0,0]
12067 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3,4,5,6,7]
12068 ; AVX2-ONLY-NEXT: vmovaps 384(%r8), %xmm2
12069 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12070 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
12071 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
12072 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12073 ; AVX2-ONLY-NEXT: vmovaps 384(%rax), %xmm2
12074 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12075 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12076 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
12077 ; AVX2-ONLY-NEXT: vmovaps 384(%rdi), %ymm2
12078 ; AVX2-ONLY-NEXT: vmovaps 384(%rsi), %ymm3
12079 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm2[0],ymm3[0],ymm2[2],ymm3[2]
12080 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
12081 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12082 ; AVX2-ONLY-NEXT: vmovaps 384(%r8), %ymm15
12083 ; AVX2-ONLY-NEXT: vmovaps 384(%r9), %ymm1
12084 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12085 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm15[0],ymm1[0],ymm15[2],ymm1[2]
12086 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],mem[0],ymm0[2],mem[2]
12087 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm4[2,3],ymm1[2,3]
12088 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12089 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm1 = ymm2[1],ymm3[1],ymm2[3],ymm3[3]
12090 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,3,3]
12091 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12092 ; AVX2-ONLY-NEXT: vmovaps 400(%rax), %xmm1
12093 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12094 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12095 ; AVX2-ONLY-NEXT: vmovaps 416(%r8), %xmm0
12096 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],mem[0]
12097 ; AVX2-ONLY-NEXT: vmovaps 416(%rax), %xmm2
12098 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
12099 ; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %xmm13
12100 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm3
12101 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
12102 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12103 ; AVX2-ONLY-NEXT: vmovaps 416(%rdx), %xmm14
12104 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12105 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm14[1]
12106 ; AVX2-ONLY-NEXT: vbroadcastsd 424(%rcx), %ymm3
12107 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
12108 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
12109 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
12110 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12111 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm0 = mem[0,0]
12112 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
12113 ; AVX2-ONLY-NEXT: vmovaps 416(%rdi), %ymm1
12114 ; AVX2-ONLY-NEXT: vmovaps 416(%rsi), %ymm2
12115 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm1[0],ymm2[0],ymm1[2],ymm2[2]
12116 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
12117 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12118 ; AVX2-ONLY-NEXT: vmovaps 416(%r8), %ymm0
12119 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm0[0],mem[0],ymm0[2],mem[2]
12120 ; AVX2-ONLY-NEXT: vmovaps 416(%rdx), %ymm3
12121 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm3[0],mem[0],ymm3[2],mem[2]
12122 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm0 = ymm4[2,3],ymm0[2,3]
12123 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12124 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
12125 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12126 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
12127 ; AVX2-ONLY-NEXT: vmovaps 432(%rax), %xmm1
12128 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12129 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12130 ; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %xmm0
12131 ; AVX2-ONLY-NEXT: vinsertf128 $1, 448(%rcx), %ymm0, %ymm0
12132 ; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %xmm1
12133 ; AVX2-ONLY-NEXT: vinsertf128 $1, 448(%rdx), %ymm1, %ymm1
12134 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
12135 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12136 ; AVX2-ONLY-NEXT: vmovaps 448(%rdi), %ymm0
12137 ; AVX2-ONLY-NEXT: vmovaps 448(%r8), %ymm11
12138 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12139 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm11[0,1],ymm0[0,1]
12140 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
12141 ; AVX2-ONLY-NEXT: vbroadcastsd 448(%rax), %ymm2
12142 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5],ymm1[6,7]
12143 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12144 ; AVX2-ONLY-NEXT: vmovaps 448(%rdx), %ymm2
12145 ; AVX2-ONLY-NEXT: vbroadcastsd 456(%rcx), %ymm1
12146 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
12147 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
12148 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3,4,5,6,7]
12149 ; AVX2-ONLY-NEXT: vinsertf128 $1, 448(%r8), %ymm0, %ymm3
12150 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
12151 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12152 ; AVX2-ONLY-NEXT: vmovaps 448(%rsi), %ymm1
12153 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
12154 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12155 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4,5,6,7]
12156 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12157 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12158 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12159 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
12160 ; AVX2-ONLY-NEXT: vmovaps 464(%rax), %xmm1
12161 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12162 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12163 ; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %ymm0
12164 ; AVX2-ONLY-NEXT: vmovaps 480(%r8), %ymm12
12165 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12166 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm3 = ymm12[0,1],ymm0[0,1]
12167 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5,6,7]
12168 ; AVX2-ONLY-NEXT: vbroadcastsd 480(%rax), %ymm3
12169 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5],ymm1[6,7]
12170 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12171 ; AVX2-ONLY-NEXT: vmovaps 480(%rdx), %xmm8
12172 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
12173 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm8[1]
12174 ; AVX2-ONLY-NEXT: vbroadcastsd 488(%rcx), %ymm3
12175 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
12176 ; AVX2-ONLY-NEXT: vinsertf128 $1, 480(%r8), %ymm0, %ymm3
12177 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
12178 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12179 ; AVX2-ONLY-NEXT: vmovaps 480(%rsi), %ymm1
12180 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
12181 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
12182 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4,5,6,7]
12183 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12184 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm0[1],ymm1[1],ymm0[3],ymm1[3]
12185 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12186 ; AVX2-ONLY-NEXT: vmovaps 480(%rdx), %ymm6
12187 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm6[6,7]
12188 ; AVX2-ONLY-NEXT: vmovaps 496(%rax), %xmm1
12189 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12190 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12191 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12192 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12193 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12194 ; AVX2-ONLY-NEXT: vbroadcastsd (%rsp), %ymm1 # 16-byte Folded Reload
12195 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12196 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12197 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12198 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12199 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12200 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12201 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12202 ; AVX2-ONLY-NEXT: vbroadcastsd 24(%rcx), %ymm1
12203 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12204 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12205 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12206 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12207 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12208 ; AVX2-ONLY-NEXT: vbroadcastsd 32(%rcx), %ymm1
12209 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12210 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12211 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12212 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12213 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12214 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12215 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12216 ; AVX2-ONLY-NEXT: vbroadcastsd 56(%rcx), %ymm1
12217 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12218 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12219 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12220 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12221 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12222 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 16-byte Folded Reload
12223 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12224 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12225 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12226 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12227 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12228 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12229 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12230 ; AVX2-ONLY-NEXT: vbroadcastsd 88(%rcx), %ymm1
12231 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12232 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12233 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12234 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12235 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12236 ; AVX2-ONLY-NEXT: vbroadcastsd 96(%rcx), %ymm1
12237 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12238 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12239 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12240 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12241 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12242 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12243 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12244 ; AVX2-ONLY-NEXT: vbroadcastsd 120(%rcx), %ymm1
12245 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12246 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12247 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12248 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12249 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12250 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 16-byte Folded Reload
12251 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12252 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12253 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12254 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12255 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12256 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12257 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12258 ; AVX2-ONLY-NEXT: vbroadcastsd 152(%rcx), %ymm1
12259 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12260 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12261 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12262 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12263 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12264 ; AVX2-ONLY-NEXT: vbroadcastsd 160(%rcx), %ymm1
12265 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12266 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12267 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12268 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12269 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12270 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12271 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12272 ; AVX2-ONLY-NEXT: vbroadcastsd 184(%rcx), %ymm1
12273 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12274 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12275 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12276 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12277 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12278 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 16-byte Folded Reload
12279 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12280 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12281 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12282 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12283 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12284 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12285 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12286 ; AVX2-ONLY-NEXT: vbroadcastsd 216(%rcx), %ymm1
12287 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12288 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12289 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12290 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12291 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12292 ; AVX2-ONLY-NEXT: vbroadcastsd 224(%rcx), %ymm1
12293 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12294 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12295 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12296 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12297 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12298 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12299 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12300 ; AVX2-ONLY-NEXT: vbroadcastsd 248(%rcx), %ymm1
12301 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12302 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12303 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12304 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12305 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12306 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 16-byte Folded Reload
12307 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12308 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12309 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12310 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12311 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12312 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12313 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12314 ; AVX2-ONLY-NEXT: vbroadcastsd 280(%rcx), %ymm1
12315 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12316 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12317 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12318 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12319 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12320 ; AVX2-ONLY-NEXT: vbroadcastsd 288(%rcx), %ymm1
12321 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12322 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12323 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12324 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
12325 ; AVX2-ONLY-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
12326 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12327 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12328 ; AVX2-ONLY-NEXT: vbroadcastsd 312(%rcx), %ymm1
12329 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12330 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12331 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12332 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12333 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12334 ; AVX2-ONLY-NEXT: vbroadcastsd %xmm10, %ymm1
12335 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12336 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm0 # 32-byte Folded Reload
12337 ; AVX2-ONLY-NEXT: # ymm0 = ymm9[1],mem[1],ymm9[3],mem[3]
12338 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12339 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12340 ; AVX2-ONLY-NEXT: vbroadcastsd 344(%rcx), %ymm1
12341 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12342 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12343 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12344 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm0
12345 ; AVX2-ONLY-NEXT: vbroadcastsd 352(%rcx), %ymm1
12346 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12347 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm0 # 32-byte Folded Reload
12348 ; AVX2-ONLY-NEXT: # ymm0 = ymm5[1],mem[1],ymm5[3],mem[3]
12349 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12350 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12351 ; AVX2-ONLY-NEXT: vbroadcastsd 376(%rcx), %ymm1
12352 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm1[0,1],ymm0[2,3,4,5,6,7]
12353 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12354 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],mem[0]
12355 ; AVX2-ONLY-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
12356 ; AVX2-ONLY-NEXT: vbroadcastsd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 16-byte Folded Reload
12357 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
12358 ; AVX2-ONLY-NEXT: vunpckhpd {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm0 # 32-byte Folded Reload
12359 ; AVX2-ONLY-NEXT: # ymm0 = ymm15[1],mem[1],ymm15[3],mem[3]
12360 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,3,3]
12361 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
12362 ; AVX2-ONLY-NEXT: vbroadcastsd 408(%rcx), %ymm15
12363 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm15[0,1],ymm0[2,3,4,5,6,7]
12364 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm15 = xmm13[0],mem[0]
12365 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm15, %ymm14
12366 ; AVX2-ONLY-NEXT: vbroadcastsd 416(%rcx), %ymm15
12367 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6,7]
12368 ; AVX2-ONLY-NEXT: vbroadcastsd 440(%rcx), %ymm15
12369 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0,1],mem[2,3]
12370 ; AVX2-ONLY-NEXT: vbroadcastsd 440(%r9), %ymm13
12371 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm15[0,1,2,3],ymm13[4,5,6,7]
12372 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm2[0],mem[0],ymm2[2],mem[2]
12373 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm2[2,3],ymm11[2,3]
12374 ; AVX2-ONLY-NEXT: vbroadcastsd 464(%r9), %ymm11
12375 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm11[6,7]
12376 ; AVX2-ONLY-NEXT: vbroadcastsd 472(%rcx), %ymm11
12377 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm11 = xmm11[0,1],mem[2,3]
12378 ; AVX2-ONLY-NEXT: vbroadcastsd 472(%r9), %ymm13
12379 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm11[0,1,2,3],ymm13[4,5,6,7]
12380 ; AVX2-ONLY-NEXT: vmovaps 448(%rax), %ymm0
12381 ; AVX2-ONLY-NEXT: vblendps $243, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
12382 ; AVX2-ONLY-NEXT: # ymm11 = mem[0,1],ymm0[2,3],mem[4,5,6,7]
12383 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5],ymm0[6,7]
12384 ; AVX2-ONLY-NEXT: vmovaps 480(%rdi), %xmm13
12385 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} xmm13 = xmm13[0],mem[0]
12386 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm13, %ymm8
12387 ; AVX2-ONLY-NEXT: vbroadcastsd 480(%rcx), %ymm13
12388 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm13[6,7]
12389 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm6[0],mem[0],ymm6[2],mem[2]
12390 ; AVX2-ONLY-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm6[2,3],ymm12[2,3]
12391 ; AVX2-ONLY-NEXT: vbroadcastsd 496(%r9), %ymm12
12392 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm12[6,7]
12393 ; AVX2-ONLY-NEXT: vbroadcastsd 504(%rcx), %ymm12
12394 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0,1],mem[2,3]
12395 ; AVX2-ONLY-NEXT: vbroadcastsd 504(%r9), %ymm13
12396 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm13[4,5,6,7]
12397 ; AVX2-ONLY-NEXT: vmovaps 480(%rax), %ymm13
12398 ; AVX2-ONLY-NEXT: vblendps $243, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm0 # 32-byte Folded Reload
12399 ; AVX2-ONLY-NEXT: # ymm0 = mem[0,1],ymm13[2,3],mem[4,5,6,7]
12400 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],ymm13[6,7]
12401 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rcx
12402 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm15[0,1,2,3,4,5],mem[6,7]
12403 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 3552(%rcx)
12404 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
12405 ; AVX2-ONLY-NEXT: vmovaps %ymm12, 3520(%rcx)
12406 ; AVX2-ONLY-NEXT: vmovaps %ymm6, 3488(%rcx)
12407 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3456(%rcx)
12408 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12409 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3424(%rcx)
12410 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12411 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3392(%rcx)
12412 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 3360(%rcx)
12413 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 3328(%rcx)
12414 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12415 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3296(%rcx)
12416 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 3264(%rcx)
12417 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 3232(%rcx)
12418 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12419 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3200(%rcx)
12420 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12421 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3168(%rcx)
12422 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12423 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3136(%rcx)
12424 ; AVX2-ONLY-NEXT: vmovaps %ymm13, 3104(%rcx)
12425 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12426 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3072(%rcx)
12427 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12428 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3040(%rcx)
12429 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12430 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 3008(%rcx)
12431 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12432 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2976(%rcx)
12433 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12434 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2944(%rcx)
12435 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 2912(%rcx)
12436 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 2880(%rcx)
12437 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12438 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2848(%rcx)
12439 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12440 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2816(%rcx)
12441 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12442 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2784(%rcx)
12443 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12444 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2752(%rcx)
12445 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 2720(%rcx)
12446 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12447 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2688(%rcx)
12448 ; AVX2-ONLY-NEXT: vmovaps %ymm5, 2656(%rcx)
12449 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12450 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2624(%rcx)
12451 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12452 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2592(%rcx)
12453 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12454 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2560(%rcx)
12455 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12456 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2528(%rcx)
12457 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12458 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2496(%rcx)
12459 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 2464(%rcx)
12460 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 2432(%rcx)
12461 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12462 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2400(%rcx)
12463 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12464 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2368(%rcx)
12465 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12466 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2336(%rcx)
12467 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12468 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2304(%rcx)
12469 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 2272(%rcx)
12470 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12471 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2240(%rcx)
12472 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12473 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2208(%rcx)
12474 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12475 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2176(%rcx)
12476 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12477 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2144(%rcx)
12478 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12479 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2112(%rcx)
12480 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12481 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2080(%rcx)
12482 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12483 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2048(%rcx)
12484 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12485 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2016(%rcx)
12486 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12487 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1984(%rcx)
12488 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12489 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1952(%rcx)
12490 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12491 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1920(%rcx)
12492 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12493 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1888(%rcx)
12494 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12495 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1856(%rcx)
12496 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12497 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1824(%rcx)
12498 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12499 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1792(%rcx)
12500 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12501 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1760(%rcx)
12502 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12503 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1728(%rcx)
12504 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12505 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1696(%rcx)
12506 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12507 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1664(%rcx)
12508 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12509 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1632(%rcx)
12510 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12511 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1600(%rcx)
12512 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12513 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1568(%rcx)
12514 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12515 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1536(%rcx)
12516 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12517 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rcx)
12518 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12519 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1472(%rcx)
12520 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12521 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rcx)
12522 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12523 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rcx)
12524 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12525 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rcx)
12526 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12527 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rcx)
12528 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12529 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1312(%rcx)
12530 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12531 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rcx)
12532 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12533 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1248(%rcx)
12534 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12535 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rcx)
12536 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12537 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rcx)
12538 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12539 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rcx)
12540 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12541 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1120(%rcx)
12542 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12543 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1088(%rcx)
12544 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12545 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rcx)
12546 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12547 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rcx)
12548 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12549 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rcx)
12550 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12551 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rcx)
12552 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12553 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 928(%rcx)
12554 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12555 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rcx)
12556 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12557 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rcx)
12558 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12559 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rcx)
12560 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12561 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rcx)
12562 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12563 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rcx)
12564 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12565 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rcx)
12566 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12567 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rcx)
12568 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12569 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rcx)
12570 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12571 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rcx)
12572 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12573 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rcx)
12574 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12575 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rcx)
12576 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12577 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rcx)
12578 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12579 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rcx)
12580 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12581 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rcx)
12582 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12583 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rcx)
12584 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12585 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rcx)
12586 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12587 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rcx)
12588 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12589 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rcx)
12590 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12591 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rcx)
12592 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12593 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rcx)
12594 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12595 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rcx)
12596 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12597 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rcx)
12598 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12599 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rcx)
12600 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12601 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rcx)
12602 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12603 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rcx)
12604 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12605 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rcx)
12606 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12607 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rcx)
12608 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12609 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rcx)
12610 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12611 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rcx)
12612 ; AVX2-ONLY-NEXT: addq $3880, %rsp # imm = 0xF28
12613 ; AVX2-ONLY-NEXT: vzeroupper
12614 ; AVX2-ONLY-NEXT: retq
12616 ; AVX512F-ONLY-SLOW-LABEL: store_i64_stride7_vf64:
12617 ; AVX512F-ONLY-SLOW: # %bb.0:
12618 ; AVX512F-ONLY-SLOW-NEXT: subq $6600, %rsp # imm = 0x19C8
12619 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12620 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm3
12621 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12622 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm29
12623 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm4
12624 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12625 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
12626 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, (%rsp) # 64-byte Spill
12627 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm20
12628 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12629 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm5
12630 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12631 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm8
12632 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm18
12633 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12634 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
12635 ; AVX512F-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
12636 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [2,10,0,3,2,10,0,3]
12637 ; AVX512F-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
12638 ; AVX512F-ONLY-SLOW-NEXT: movb $96, %r10b
12639 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k1
12640 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
12641 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm11
12642 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm6
12643 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm9
12644 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12645 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
12646 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
12647 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12648 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm0, %zmm2
12649 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14
12650 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
12651 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
12652 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
12653 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12
12654 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12655 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
12656 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm0
12657 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
12658 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm28, %zmm2
12659 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
12660 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm5
12661 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12662 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm3
12663 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12664 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm0
12665 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12666 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm4
12667 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12668 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
12669 ; AVX512F-ONLY-SLOW-NEXT: movb $28, %r10b
12670 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k2
12671 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm6[2,3,2,3]
12672 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12673 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
12674 ; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
12675 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
12676 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5
12677 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm0
12678 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
12679 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
12680 ; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
12681 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm2, %zmm0
12682 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm11
12683 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12684 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
12685 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
12686 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
12687 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm17
12688 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12689 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
12690 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm19
12691 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
12692 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
12693 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
12694 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15
12695 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12696 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
12697 ; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
12698 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm5, %zmm1
12699 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [6,13,14,7,6,13,14,7]
12700 ; AVX512F-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
12701 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
12702 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12703 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm1
12704 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm1
12705 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm2
12706 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm2
12707 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
12708 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
12709 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm9[2,3,2,3]
12710 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12711 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
12712 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm3
12713 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12714 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm18
12715 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
12716 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
12717 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
12718 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12719 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12720 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
12721 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm11, %zmm2
12722 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12723 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
12724 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12725 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
12726 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
12727 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12728 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12729 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
12730 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm9
12731 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12732 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm23
12733 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm24
12734 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm1
12735 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm10, %zmm1
12736 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm30
12737 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm25
12738 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm12
12739 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm2
12740 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm28, %zmm2
12741 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
12742 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm9
12743 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%r9), %ymm6
12744 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12745 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%r8), %ymm1
12746 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12747 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[2]
12748 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm9[2,3,2,3]
12749 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12750 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm1
12751 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm21
12752 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12753 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm14, %zmm2
12754 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm4, %zmm2
12755 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12756 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
12757 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12758 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm3
12759 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm16, %zmm2
12760 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm11, %zmm2
12761 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12762 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm2
12763 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12764 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
12765 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
12766 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12767 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12768 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm5, %zmm1
12769 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm9
12770 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12771 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm27
12772 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm26
12773 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
12774 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm30, %zmm1
12775 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm17
12776 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm14
12777 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
12778 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm28, %zmm2
12779 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
12780 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm21
12781 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%r9), %ymm9
12782 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12783 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%r8), %ymm1
12784 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12785 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm9[0],ymm1[2],ymm9[2]
12786 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm21[2,3,2,3]
12787 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12788 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm1
12789 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm0
12790 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12791 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm2
12792 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm4, %zmm2
12793 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12794 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
12795 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm16, %zmm2
12796 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm2
12797 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12798 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2
12799 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12800 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
12801 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm15, %zmm2
12802 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12803 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm31
12804 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12805 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm1
12806 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm21
12807 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12808 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm21
12809 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm22
12810 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm2
12811 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12812 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm30, %zmm2
12813 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm16
12814 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm15
12815 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm4
12816 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm4
12817 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
12818 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%rax), %zmm0
12819 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%r9), %ymm6
12820 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12821 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%r8), %ymm2
12822 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12823 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
12824 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
12825 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm0[2,3,2,3]
12826 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12827 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm2
12828 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm13
12829 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
12830 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm18, %zmm4
12831 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12832 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm4
12833 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12834 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12835 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
12836 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
12837 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12838 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm4
12839 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm11, %zmm4
12840 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12841 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12842 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm4
12843 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12844 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12845 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm19, %zmm4
12846 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm4
12847 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12848 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12849 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12850 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm2
12851 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm7, %zmm6
12852 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12853 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm6
12854 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm2
12855 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12856 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
12857 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm4
12858 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm2
12859 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm13
12860 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
12861 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm9
12862 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
12863 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%rax), %zmm3
12864 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %ymm30
12865 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %ymm30, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12866 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%r8), %ymm4
12867 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12868 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm30[0],ymm4[2],ymm30[2]
12869 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
12870 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm4[2,3,2,3],zmm3[2,3,2,3]
12871 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12872 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm4
12873 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm9
12874 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3
12875 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm18, %zmm3
12876 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm10, %zmm3
12877 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12878 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3
12879 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm3
12880 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm11, %zmm3
12881 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12882 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
12883 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12884 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm19, %zmm0
12885 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm31, %zmm0
12886 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12887 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12888 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm5, %zmm4
12889 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm7, %zmm30
12890 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12891 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
12892 ; AVX512F-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
12893 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12894 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
12895 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm11, %zmm0
12896 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12897 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
12898 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
12899 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3
12900 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm0, %zmm3
12901 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12902 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
12903 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
12904 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm5, %zmm0
12905 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12906 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
12907 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
12908 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
12909 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm9
12910 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12911 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
12912 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
12913 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm11, %zmm4
12914 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12915 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
12916 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm3, %zmm4
12917 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12918 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
12919 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm5, %zmm4
12920 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12921 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
12922 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
12923 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
12924 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
12925 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12926 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
12927 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm3, %zmm1
12928 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12929 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
12930 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm1
12931 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12932 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
12933 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm30
12934 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
12935 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm11, %zmm1
12936 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12937 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
12938 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm1
12939 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12940 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
12941 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
12942 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12943 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm0, %zmm14
12944 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm17
12945 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
12946 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm11, %zmm1
12947 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12948 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
12949 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm3, %zmm1
12950 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12951 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
12952 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm5, %zmm1
12953 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12954 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm0, %zmm15
12955 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm16
12956 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
12957 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm11, %zmm1
12958 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12959 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
12960 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm4
12961 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm1
12962 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12963 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
12964 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm1
12965 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12966 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm13
12967 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18
12968 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm14
12969 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm1
12970 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm12
12971 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm28, %zmm12
12972 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm19
12973 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm19
12974 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm3
12975 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm20
12976 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm28
12977 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12978 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
12979 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm11, %zmm2
12980 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12981 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm4
12982 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12983 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm25
12984 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm5, %zmm25
12985 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm20
12986 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm7
12987 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm0, %zmm7
12988 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm15
12989 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm14
12990 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm15
12991 ; AVX512F-ONLY-SLOW-NEXT: movb $48, %r10b
12992 ; AVX512F-ONLY-SLOW-NEXT: kmovw %r10d, %k3
12993 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [1,0,10,2,1,0,10,2]
12994 ; AVX512F-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
12995 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
12996 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
12997 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12998 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm2
12999 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13000 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k3} = zmm0[0],zmm3[0],zmm0[2],zmm3[2],zmm0[4],zmm3[4],zmm0[6],zmm3[6]
13001 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13002 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4
13003 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
13004 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm9
13005 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
13006 ; AVX512F-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
13007 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
13008 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13009 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm4
13010 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13011 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
13012 ; AVX512F-ONLY-SLOW-NEXT: # ymm13 = mem[0,1,0,1]
13013 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm5
13014 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13015 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [15,7,15,7]
13016 ; AVX512F-ONLY-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
13017 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm8, %zmm9
13018 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13019 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
13020 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
13021 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
13022 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13023 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k3} = zmm29[0],zmm0[0],zmm29[2],zmm0[2],zmm29[4],zmm0[4],zmm29[6],zmm0[6]
13024 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13025 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm4
13026 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm3
13027 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm5
13028 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm29
13029 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13030 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm4
13031 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13032 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
13033 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13034 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm5
13035 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13036 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm0
13037 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
13038 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
13039 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm30 {%k3} = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
13040 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13041 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
13042 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm0
13043 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm4
13044 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm2, %zmm23
13045 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13046 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm11, %zmm3
13047 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13048 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm0
13049 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13050 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm8, %zmm4
13051 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13052 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm0
13053 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm0
13054 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13055 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm17 {%k3} = zmm27[0],zmm26[0],zmm27[2],zmm26[2],zmm27[4],zmm26[4],zmm27[6],zmm26[6]
13056 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13057 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm3
13058 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
13059 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm31
13060 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm2, %zmm27
13061 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13062 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm3
13063 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13064 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm0
13065 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13066 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm8, %zmm31
13067 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm30
13068 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm30
13069 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm21[0],zmm22[0],zmm21[2],zmm22[2],zmm21[4],zmm22[4],zmm21[6],zmm22[6]
13070 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13071 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm3
13072 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
13073 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm27
13074 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm29
13075 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm29
13076 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
13077 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13078 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm0
13079 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13080 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm8, %zmm27
13081 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13082 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26
13083 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
13084 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm18 {%k3} = zmm6[0],zmm0[0],zmm6[2],zmm0[2],zmm6[4],zmm0[4],zmm6[6],zmm0[6]
13085 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13086 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm28
13087 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm3
13088 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm23
13089 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
13090 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm24
13091 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm28
13092 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
13093 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13094 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm23
13095 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm9
13096 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm0
13097 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm6
13098 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13099 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm6
13100 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
13101 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
13102 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
13103 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm4
13104 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm21
13105 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm17
13106 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
13107 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm22
13108 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm5
13109 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13110 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm11
13111 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm13
13112 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm21[0],zmm17[0],zmm21[2],zmm17[2],zmm21[4],zmm17[4],zmm21[6],zmm17[6]
13113 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm18
13114 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm21
13115 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm18
13116 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k3} = zmm9[0],zmm0[0],zmm9[2],zmm0[2],zmm9[4],zmm0[4],zmm9[6],zmm0[6]
13117 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm9, %zmm2
13118 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm9, %zmm0, %zmm1
13119 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm9
13120 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm12 {%k1}
13121 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm19[4,5,6,7]
13122 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm6
13123 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,11,u,u,4,5,6,7>
13124 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm7, %zmm0
13125 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
13126 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,1,11,u,4,5,6,7>
13127 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm0, %zmm8
13128 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
13129 ; AVX512F-ONLY-SLOW-NEXT: movb $4, %sil
13130 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
13131 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm12 {%k3}
13132 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,1,2,10,u,5,6,7>
13133 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm12, %zmm8
13134 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm4 {%k1}
13135 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <12,u,u,3,4,5,6,13>
13136 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm4, %zmm0
13137 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,12,u,3,4,5,6,7>
13138 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm0, %zmm4
13139 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
13140 ; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
13141 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
13142 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm19 # 64-byte Folded Reload
13143 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13144 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
13145 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13146 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13147 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
13148 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13149 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
13150 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm5 # 64-byte Folded Reload
13151 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13152 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
13153 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13154 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13155 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
13156 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13157 ; AVX512F-ONLY-SLOW-NEXT: movb $24, %sil
13158 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k5
13159 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm18 {%k5}
13160 ; AVX512F-ONLY-SLOW-NEXT: movb $6, %sil
13161 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k3
13162 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 456(%rcx), %ymm12
13163 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
13164 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm1 {%k3}
13165 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,1,2,9,u,u,6,7>
13166 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm1, %zmm12
13167 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm9 {%k5}
13168 ; AVX512F-ONLY-SLOW-NEXT: movb $64, %sil
13169 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
13170 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm3 {%k4}
13171 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,1,2,3,4,15,u,u>
13172 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm9
13173 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm6
13174 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm18
13175 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm1
13176 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm0
13177 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
13178 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm14
13179 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
13180 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm15
13181 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
13182 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm1, %zmm16
13183 ; AVX512F-ONLY-SLOW-NEXT: movb $12, %sil
13184 ; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %xmm6
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm2, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,2,3,4,8,u,7>
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm2, %zmm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,3,9,u,6,7>
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm12, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm3, %zmm12
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm2, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 448(%rax), %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,10,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,12,3,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm4, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 384(%rax), %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%r9), %ymm14
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%r8), %ymm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm14[0],ymm4[2],ymm14[2]
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 {%k2} = zmm4[2,3,2,3],zmm3[2,3,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm16
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,2,3,4,5,8,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm6, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [14,1,2,3,4,5,6,15]
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm18
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,4,9,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm1, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm9
; AVX512F-ONLY-SLOW-NEXT: movb $8, %sil
; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k5}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k5}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k5}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k5}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k5}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k5}
; AVX512F-ONLY-SLOW-NEXT: movb $-31, %sil
; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm6 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm29 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm24 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm21 {%k4}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm19, %zmm1
; AVX512F-ONLY-SLOW-NEXT: movb $112, %sil
; AVX512F-ONLY-SLOW-NEXT: kmovw %esi, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm1, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 128(%rax), %zmm1, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 192(%rax), %zmm5, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 256(%rax), %zmm1, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 320(%rax), %zmm1, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $3, 384(%rax), %zmm0, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 264(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 328(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm26 {%k3}
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq 392(%rcx), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k3}
; AVX512F-ONLY-SLOW-NEXT: movb $56, %cl
; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm22 {%k2}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm28 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movb $120, %cl
; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm17 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, %zmm31 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movb $-61, %cl
; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm15 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm14 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm14 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm12 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm3 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm4 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm6 = zmm0[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm7 # 64-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # zmm7 = zmm13[0,1,2,3],mem[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: movb $14, %cl
; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm13 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm11 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm28 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm2 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm1 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm0 {%k1}
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm20 {%k1}
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 3008(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 2944(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 2880(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm7, 2816(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2752(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 2624(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 2560(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 2496(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 2432(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm6, 2368(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 2304(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 2240(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 2176(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 2048(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1984(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm4, 1920(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1856(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, 1792(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1728(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1664(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 1600(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1536(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm3, 1472(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 1216(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 1152(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm2, 1024(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 768(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 704(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 640(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm1, 576(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 512(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 320(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 256(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, (%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 3520(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3264(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3200(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 3072(%rax)
; AVX512F-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vmovaps %zmm0, 3136(%rax)
; AVX512F-ONLY-SLOW-NEXT: addq $6600, %rsp # imm = 0x19C8
; AVX512F-ONLY-SLOW-NEXT: vzeroupper
; AVX512F-ONLY-SLOW-NEXT: retq
;
; AVX512F-ONLY-FAST-LABEL: store_i64_stride7_vf64:
; AVX512F-ONLY-FAST: # %bb.0:
; AVX512F-ONLY-FAST-NEXT: subq $6696, %rsp # imm = 0x1A28
; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm23
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm17
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm24
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm7
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [2,10,0,3,2,10,0,3]
; AVX512F-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: movb $96, %r10b
; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
; AVX512F-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm14
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,9,0,3,4,9,0,3]
; AVX512F-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm11
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm26
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm6[0],ymm0[0],ymm6[2],ymm0[2]
; AVX512F-ONLY-FAST-NEXT: movb $28, %r10b
; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k2
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm3[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm7
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [5,0,14,6,5,0,14,6]
; AVX512F-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
; AVX512F-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [15,7,15,7,15,7,15,7]
; AVX512F-ONLY-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [6,13,14,7,6,13,14,7]
; AVX512F-ONLY-FAST-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm26[0],ymm11[0],ymm26[2],ymm11[2]
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm4[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm24
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm12, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%r9), %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%r8), %ymm4
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm25
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm2[2,3,2,3],zmm6[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm27, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm18, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm8, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm16, %zmm6
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm11
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm28
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm12, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %ymm17
; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%r8), %ymm12
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm12[0],ymm17[0],ymm12[2],ymm17[2]
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm10 {%k2} = zmm3[2,3,2,3],zmm30[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm27, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm13
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm6
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm18, %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm8, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm20, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm16, %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm13
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm29
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm28
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm18
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm5, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm30
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm14 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%rax), %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%r9), %ymm9
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %ymm22
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm22[0],ymm9[0],ymm22[2],ymm9[2]
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm4[2,3,2,3],zmm0[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm27, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm14
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm8, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm20, %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm4
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm16, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm20
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm21
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm30, %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm21 {%k1}
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%rax), %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%r9), %ymm4
; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%r8), %ymm2
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm2[0],ymm4[0],ymm2[2],ymm4[2]
; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm21 {%k2} = zmm10[2,3,2,3],zmm1[2,3,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm10
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm21
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm7, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm27, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm15, %zmm10
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm16, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,7,7]
; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm26 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm26, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm25 # 32-byte Folded Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm25, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm17, %ymm0, %ymm12
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm9, %ymm0, %ymm22
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm4, %ymm0, %ymm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%r9), %ymm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%r8), %ymm2
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %ymm1, %ymm0, %ymm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [3,0,12,4,3,0,12,4]
; AVX512F-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm27, %zmm2
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [13,5,13,5,13,5,13,5]
; AVX512F-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm12, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm30 = [6,14,6,14,6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm30, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: movb $48, %r10b
; AVX512F-ONLY-FAST-NEXT: kmovw %r10d, %k3
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,8,0,1,0,8,0,1]
; AVX512F-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,0,10,2,1,0,10,2]
; AVX512F-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm9
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm9
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm0[0],zmm23[0],zmm0[2],zmm23[2],zmm0[4],zmm23[4],zmm0[6],zmm23[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm29 = [6,14,6,14]
; AVX512F-ONLY-FAST-NEXT: # ymm29 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm29, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm25 = [15,7,15,7]
; AVX512F-ONLY-FAST-NEXT: # ymm25 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm25, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm30, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm3[0],zmm0[0],zmm3[2],zmm0[2],zmm3[4],zmm0[4],zmm3[6],zmm0[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm30, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm19[0],zmm2[2],zmm19[2],zmm2[4],zmm19[4],zmm2[6],zmm19[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm29, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm25, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm10, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm11[0],zmm2[2],zmm11[2],zmm2[4],zmm11[4],zmm2[6],zmm11[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm29, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm25, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm27, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm30, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm15, %zmm28
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm13[0],zmm0[2],zmm13[2],zmm0[4],zmm13[4],zmm0[6],zmm13[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm29, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm25, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm12, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm30, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm15, %zmm31
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm2
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm28
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm28
; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm20[0],zmm0[2],zmm20[2],zmm0[4],zmm20[4],zmm0[6],zmm20[6]
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm1
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm25, %zmm0
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm13
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm5
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm0, %zmm14
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm16
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm16
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm20
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm27, %zmm20
; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm19
; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm12, %zmm19
14067 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm15, %zmm13
14068 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm4
14069 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm5
14070 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm0
14071 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14072 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm5, %zmm27
14073 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm30
14074 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
14075 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm15, %zmm4
14076 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm12, %zmm0
14077 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14078 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm11
14079 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm0
14080 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8
14081 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14082 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
14083 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
14084 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm2
14085 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm3
14086 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm3
14087 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm6
14088 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm5
14089 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
14090 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm24
14091 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm23
14092 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm7, %zmm23
14093 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm1
14094 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14095 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm27 {%k3} = zmm6[0],zmm5[0],zmm6[2],zmm5[2],zmm6[4],zmm5[4],zmm6[6],zmm5[6]
14096 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm12
14097 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm29
14098 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm25, %zmm6
14099 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm11[0],zmm0[0],zmm11[2],zmm0[2],zmm11[4],zmm0[4],zmm11[6],zmm0[6]
14100 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm11, %zmm10
14101 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm0, %zmm7
14102 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm11
14103 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm14 {%k1}
14104 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,2,3],zmm16[4,5,6,7]
14105 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm2
14106 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,11,u,u,4,5,6,7>
14107 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm20, %zmm5
14108 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm25
14109 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,11,u,4,5,6,7>
14110 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm5, %zmm15
14111 ; AVX512F-ONLY-FAST-NEXT: movb $4, %sil
14112 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
14113 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k3}
14114 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm16 = <0,1,2,10,u,5,6,7>
14115 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm14, %zmm16
14116 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm3 {%k1}
14117 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <12,u,u,3,4,5,6,13>
14118 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
14119 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,12,u,3,4,5,6,7>
14120 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm5, %zmm19
14121 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [0,8,0,8,0,8,0,8]
14122 ; AVX512F-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14123 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14124 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
14125 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14126 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14127 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
14128 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14129 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14130 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
14131 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14132 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14133 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
14134 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14135 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
14136 ; AVX512F-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm17 # 64-byte Folded Reload
14137 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
14138 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm20, %zmm26
14139 ; AVX512F-ONLY-FAST-NEXT: movb $24, %sil
14140 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k4
14141 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k4}
14142 ; AVX512F-ONLY-FAST-NEXT: movb $6, %sil
14143 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k5
14144 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 456(%rcx), %ymm1
14145 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
14146 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k5}
14147 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,9,u,u,6,7>
14148 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm7, %zmm1
14149 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm11 {%k4}
14150 ; AVX512F-ONLY-FAST-NEXT: movb $64, %sil
14151 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
14152 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k3}
14153 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,1,2,3,4,15,u,u>
14154 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
14155 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm2
14156 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm6
14157 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm3
14158 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm20
14159 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14160 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
14161 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
14162 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm14
14163 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
14164 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm3, %zmm13
14165 ; AVX512F-ONLY-FAST-NEXT: movb $12, %sil
14166 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k3
14167 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %xmm2
14168 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
14169 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
14170 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10 {%k3}
14171 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm10, %zmm2
14172 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,1,2,3,4,8,u,7>
14173 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm2, %zmm5
14174 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
14175 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
14176 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,3,9,u,6,7>
14177 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm1, %zmm7
14178 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <13,u,2,3,4,5,6,14>
14179 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm0, %zmm10
14180 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm2, %zmm11
14181 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
14182 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14183 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm9 {%k5}
14184 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
14185 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14186 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
14187 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k5}
14188 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
14189 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14190 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
14191 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k5}
14192 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
14193 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14194 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
14195 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm18 {%k5}
14196 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 264(%rcx), %ymm0
14197 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14198 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
14199 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k5}
14200 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 328(%rcx), %ymm0
14201 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14202 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm28 {%k5}
14203 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq 392(%rcx), %ymm0
14204 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
14205 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm23 {%k5}
14206 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 448(%rax), %zmm0
14207 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,10,5,6,7]
14208 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm16, %zmm1
14209 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14210 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,12,3,4,5,6,7]
14211 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm1
14212 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14213 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 384(%rax), %zmm1
14214 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14215 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
14216 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
14217 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14218 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16 {%k1}
14219 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14220 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k2} = zmm3[2,3,2,3],zmm1[2,3,2,3]
14221 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14222 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14223 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm14
14224 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14225 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14226 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm13
14227 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14228 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,4,5,8,7]
14229 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm5, %zmm3
14230 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14231 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
14232 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm6
14233 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,4,9,6,7]
14234 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm7, %zmm1
14235 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14236 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
14237 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm10, %zmm1
14238 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14239 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm11
14240 ; AVX512F-ONLY-FAST-NEXT: movb $8, %sil
14241 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
14242 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
14243 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14244 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14245 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14246 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
14247 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14248 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14249 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
14250 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14251 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14252 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
14253 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14254 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14255 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
14256 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14257 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14258 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k4}
14259 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14260 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k4}
14261 ; AVX512F-ONLY-FAST-NEXT: movb $-31, %sil
14262 ; AVX512F-ONLY-FAST-NEXT: kmovw %esi, %k2
14263 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14264 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
14265 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14266 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14267 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
14268 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14269 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14270 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
14271 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14272 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14273 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
14274 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14275 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14276 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
14277 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14278 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14279 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k2}
14280 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14281 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
14282 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14283 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14284 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14285 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
14286 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
14287 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14288 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14289 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14290 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k3}
14291 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
14292 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14293 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14294 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14295 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
14296 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
14297 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14298 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14299 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14300 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k3}
14301 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %xmm0
14302 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14303 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14304 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14305 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm10 {%k3}
14306 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %xmm0
14307 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14308 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14309 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
14310 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k3}
14311 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %xmm0
14312 ; AVX512F-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
14313 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
14314 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm24 {%k3}
14315 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14316 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm0, %zmm0
14317 ; AVX512F-ONLY-FAST-NEXT: movb $112, %cl
14318 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k2
14319 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
14320 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14321 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14322 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm0, %zmm0
14323 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
14324 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14325 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14326 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 128(%rax), %zmm0, %zmm0
14327 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
14328 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14329 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14330 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
14331 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
14332 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14333 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 256(%rax), %zmm17, %zmm0
14334 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
14335 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14336 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 320(%rax), %zmm26, %zmm0
14337 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
14338 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm26
14339 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $3, 384(%rax), %zmm20, %zmm0
14340 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k2}
14341 ; AVX512F-ONLY-FAST-NEXT: movb $56, %cl
14342 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k2
14343 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14344 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
14345 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14346 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14347 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
14348 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14349 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14350 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
14351 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14352 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14353 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
14354 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14355 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14356 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
14357 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14358 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
14359 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm23 {%k2}
14360 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14361 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14362 ; AVX512F-ONLY-FAST-NEXT: movb $14, %cl
14363 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k2
14364 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
14365 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k2}
14366 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14367 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14368 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
14369 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k2}
14370 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14371 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14372 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
14373 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm19 {%k2}
14374 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14375 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14376 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
14377 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k2}
14378 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14379 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14380 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
14381 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k2}
14382 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14383 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14384 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
14385 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k2}
14386 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14387 ; AVX512F-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
14388 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm27 {%k2}
14389 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14390 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14391 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
14392 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14393 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14394 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
14395 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14396 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14397 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
14398 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14399 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14400 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
14401 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14402 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14403 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
14404 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14405 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14406 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
14407 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14408 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
14409 ; AVX512F-ONLY-FAST-NEXT: movb $120, %al
14410 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
14411 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
14412 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
14413 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14414 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm16 # 64-byte Folded Reload
14415 ; AVX512F-ONLY-FAST-NEXT: # zmm16 = zmm0[0,1,2,3],mem[4,5,6,7]
14416 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
14417 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
14418 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
14419 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm4 {%k1}
14420 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14421 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
14422 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
14423 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
14424 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14425 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0 {%k1}
14426 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
14427 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm20 {%k1}
14428 ; AVX512F-ONLY-FAST-NEXT: movb $-61, %al
14429 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
14430 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14431 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm16 {%k1}
14432 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14433 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm5 # 64-byte Folded Reload
14434 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = zmm5[0,1,2,3],mem[4,5,6,7]
14435 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14436 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm5 {%k1}
14437 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14438 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
14439 ; AVX512F-ONLY-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
14440 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14441 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm7 {%k1}
14442 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14443 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm9 # 64-byte Folded Reload
14444 ; AVX512F-ONLY-FAST-NEXT: # zmm9 = zmm9[0,1,2,3],mem[4,5,6,7]
14445 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14446 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
14447 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14448 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 64-byte Folded Reload
14449 ; AVX512F-ONLY-FAST-NEXT: # zmm10 = zmm10[0,1,2,3],mem[4,5,6,7]
14450 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
14451 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm10 {%k1}
14452 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14453 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm13 # 64-byte Folded Reload
14454 ; AVX512F-ONLY-FAST-NEXT: # zmm13 = zmm12[0,1,2,3],mem[4,5,6,7]
14455 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
14456 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm13 {%k1}
14457 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm29[0,1,2,3],zmm30[4,5,6,7]
14458 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14459 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm17 {%k1}
14460 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
14461 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
14462 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 2944(%rax)
14463 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 2880(%rax)
14464 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14465 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm12, 2816(%rax)
14466 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 2752(%rax)
14467 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 2688(%rax)
14468 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14469 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm12, 2624(%rax)
14470 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 2560(%rax)
14471 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 2496(%rax)
14472 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 2432(%rax)
14473 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
14474 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm12, 2368(%rax)
14475 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 2304(%rax)
14476 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 2240(%rax)
14477 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14478 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 2176(%rax)
14479 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 2112(%rax)
14480 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 2048(%rax)
14481 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 1984(%rax)
14482 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
14483 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm10, 1920(%rax)
14484 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 1856(%rax)
14485 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14486 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
14487 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14488 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1728(%rax)
14489 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1664(%rax)
14490 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 1600(%rax)
14491 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 1536(%rax)
14492 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14493 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm9, 1472(%rax)
14494 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14495 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
14496 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14497 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
14498 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14499 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
14500 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
14501 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
14502 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
14503 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14504 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm7, 1024(%rax)
14505 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14506 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 960(%rax)
14507 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14508 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 896(%rax)
14509 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14510 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 832(%rax)
14511 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
14512 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
14513 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
14514 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
14515 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm5, 576(%rax)
14516 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14517 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 512(%rax)
14518 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14519 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
14520 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14521 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
14522 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 320(%rax)
14523 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 256(%rax)
14524 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 192(%rax)
14525 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14526 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
14527 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14528 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
14529 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14530 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, (%rax)
14531 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 3520(%rax)
14532 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14533 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
14534 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14535 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
14536 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14537 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
14538 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14539 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3264(%rax)
14540 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14541 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3200(%rax)
14542 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 3072(%rax)
14543 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
14544 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm0, 3136(%rax)
14545 ; AVX512F-ONLY-FAST-NEXT: addq $6696, %rsp # imm = 0x1A28
14546 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
14547 ; AVX512F-ONLY-FAST-NEXT: retq
14548 ;
14549 ; AVX512DQ-SLOW-LABEL: store_i64_stride7_vf64:
14550 ; AVX512DQ-SLOW: # %bb.0:
14551 ; AVX512DQ-SLOW-NEXT: subq $6472, %rsp # imm = 0x1948
14552 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
14553 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdi), %zmm3
14554 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14555 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm21
14556 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14557 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rsi), %zmm4
14558 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14559 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm18
14560 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm18, (%rsp) # 64-byte Spill
14561 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm13
14562 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14563 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rdx), %zmm5
14564 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14565 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rcx), %zmm12
14566 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
14567 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14568 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [11,3,11,3,11,3,11,3]
14569 ; AVX512DQ-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14570 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
14571 ; AVX512DQ-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
14572 ; AVX512DQ-SLOW-NEXT: movb $96, %r10b
14573 ; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k1
14574 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
14575 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%r9), %zmm9
14576 ; AVX512DQ-SLOW-NEXT: vmovdqa64 (%rax), %zmm6
14577 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%rax), %zmm7
14578 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14579 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
14580 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14581 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14582 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
14583 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11
14584 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
14585 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
14586 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
14587 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
14588 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14589 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
14590 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm17, %zmm0
14591 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
14592 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm27, %zmm2
14593 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
14594 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm5
14595 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14596 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %ymm3
14597 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14598 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm0
14599 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14600 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %ymm4
14601 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14602 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
14603 ; AVX512DQ-SLOW-NEXT: movb $28, %r10b
14604 ; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k2
14605 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm6[2,3,2,3]
14606 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14607 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
14608 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14609 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
14610 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm5
14611 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm0
14612 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
14613 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
14614 ; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14615 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm2, %zmm0
14616 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
14617 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14618 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [5,0,14,6,5,0,14,6]
14619 ; AVX512DQ-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
14620 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
14621 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14
14622 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14623 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
14624 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [0,13,6,7,0,13,6,7]
14625 ; AVX512DQ-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
14626 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm2
14627 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14628 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
14629 ; AVX512DQ-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
14630 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
14631 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [6,13,14,7,6,13,14,7]
14632 ; AVX512DQ-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
14633 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm6
14634 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14635 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
14636 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm1
14637 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm2
14638 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm27, %zmm2
14639 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
14640 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
14641 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm7[2,3,2,3]
14642 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14643 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
14644 ; AVX512DQ-SLOW-NEXT: vmovdqa64 64(%r9), %zmm3
14645 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14646 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm2
14647 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
14648 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14649 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14650 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
14651 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm9, %zmm2
14652 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14653 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
14654 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14655 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
14656 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm2
14657 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14658 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14659 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
14660 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm7
14661 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14662 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm22
14663 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm23
14664 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm1
14665 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm1
14666 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
14667 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm18
14668 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm14
14669 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm2
14670 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm27, %zmm2
14671 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
14672 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%rax), %zmm7
14673 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%r9), %ymm6
14674 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14675 ; AVX512DQ-SLOW-NEXT: vmovdqa 128(%r8), %ymm1
14676 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14677 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[2]
14678 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm7[2,3,2,3]
14679 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14680 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r8), %zmm1
14681 ; AVX512DQ-SLOW-NEXT: vmovdqa64 128(%r9), %zmm6
14682 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14683 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm11, %zmm2
14684 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm31
14685 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
14686 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14687 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm30
14688 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14689 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm16, %zmm2
14690 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm9, %zmm2
14691 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14692 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm2
14693 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14694 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
14695 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm2
14696 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14697 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14698 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm5, %zmm1
14699 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm7
14700 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14701 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm28
14702 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
14703 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm1
14704 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm1
14705 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm13
14706 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm26
14707 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm2
14708 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm27, %zmm2
14709 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
14710 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%rax), %zmm6
14711 ; AVX512DQ-SLOW-NEXT: vmovdqa 192(%r9), %ymm4
14712 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14713 ; AVX512DQ-SLOW-NEXT: vmovdqa 192(%r8), %ymm1
14714 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14715 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm4[0],ymm1[2],ymm4[2]
14716 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm6[2,3,2,3]
14717 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14718 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r8), %zmm1
14719 ; AVX512DQ-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
14720 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14721 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm2
14722 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm2
14723 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14724 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
14725 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
14726 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm9, %zmm2
14727 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14728 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
14729 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
14730 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14731 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
14732 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm2
14733 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14734 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14735 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
14736 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm6
14737 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14738 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm25
14739 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
14740 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm2
14741 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9
14742 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14743 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm29, %zmm2
14744 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm11
14745 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm29
14746 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
14747 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm27, %zmm4
14748 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
14749 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%rax), %zmm1
14750 ; AVX512DQ-SLOW-NEXT: vmovdqa 256(%r9), %ymm7
14751 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14752 ; AVX512DQ-SLOW-NEXT: vmovdqa 256(%r8), %ymm2
14753 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
14754 ; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm2[0],ymm7[0],ymm2[2],ymm7[2]
14755 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm1[2,3,2,3]
14756 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14757 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r8), %zmm2
14758 ; AVX512DQ-SLOW-NEXT: vmovdqa64 256(%r9), %zmm6
14759 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
14760 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14761 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm4
14762 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14763 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm10, %zmm4
14764 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14765 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
14766 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14767 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm16, %zmm4
14768 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm19, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm15, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm5, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm8, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm9, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm9
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm27, %zmm9
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%rax), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%r9), %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%r8), %ymm0
; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm4[2,3,2,3],zmm2[2,3,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r8), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 320(%r9), %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm31, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm16, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm19, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [13,5,13,5,13,5,13,5]
; AVX512DQ-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm0, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm4, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm26, %zmm5, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm13, %zmm0, %zmm26
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm9, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm4, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm29, %zmm5, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm11, %zmm0, %zmm29
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm9, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm12
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm27, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm19
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm19
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm20
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm27
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm20, %zmm5, %zmm27
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm20
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm14, %zmm0, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm16
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm14
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm16
; AVX512DQ-SLOW-NEXT: movb $48, %r10b
; AVX512DQ-SLOW-NEXT: kmovw %r10d, %k3
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [1,0,10,2,1,0,10,2]
; AVX512DQ-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 {%k3} = zmm0[0],zmm4[0],zmm0[2],zmm4[2],zmm0[4],zmm4[4],zmm0[6],zmm4[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11
; AVX512DQ-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
; AVX512DQ-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
; AVX512DQ-SLOW-NEXT: # ymm13 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [15,7,15,7]
; AVX512DQ-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm11
; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm15 {%k3} = zmm0[0],zmm4[0],zmm0[2],zmm4[2],zmm0[4],zmm4[4],zmm0[6],zmm4[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm15
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm18 {%k3} = zmm22[0],zmm23[0],zmm22[2],zmm23[2],zmm22[4],zmm23[4],zmm22[6],zmm23[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm2, %zmm22
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm9, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm13, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm23, %zmm8, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm26 {%k3} = zmm28[0],zmm21[0],zmm28[2],zmm21[2],zmm28[4],zmm21[4],zmm28[6],zmm21[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm23
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm2, %zmm28
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm9, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm13, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm8, %zmm23
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm0
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm25, %zmm1, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm25[0],zmm17[0],zmm25[2],zmm17[2],zmm25[4],zmm17[4],zmm25[6],zmm17[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm31
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm29
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, %zmm30
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm30
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm31
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm29
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, %zmm26
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k3} = zmm7[0],zmm10[0],zmm7[2],zmm10[2],zmm7[4],zmm10[4],zmm7[6],zmm10[6]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm28
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm25
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm2, %zmm25
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm9, %zmm28
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm13, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm24
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, %zmm5
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm21
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm22
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm10
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm9
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm13
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm21[0],zmm17[0],zmm21[2],zmm17[2],zmm21[4],zmm17[4],zmm21[6],zmm17[6]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, %zmm18
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm21
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm18
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm0[0],zmm6[2],zmm0[2],zmm6[4],zmm0[4],zmm6[6],zmm0[6]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm0, %zmm6, %zmm2
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12 {%k1}
; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm4[0,1,2,3],zmm19[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r8), %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,11,u,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%r9), %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,1,11,u,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm7, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm15
; AVX512DQ-SLOW-NEXT: movb $4, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k3}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,10,u,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm12, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <12,u,u,3,4,5,6,13>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,12,u,3,4,5,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm3, %zmm5
; AVX512DQ-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [0,8,0,8,0,8,0,8]
; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm19 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm10 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: movb $24, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k5
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, %zmm18 {%k5}
; AVX512DQ-SLOW-NEXT: movb $6, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k3
; AVX512DQ-SLOW-NEXT: vpbroadcastq 456(%rcx), %ymm12
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm1 {%k3}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,1,2,9,u,u,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm1, %zmm12
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k5}
; AVX512DQ-SLOW-NEXT: movb $64, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,1,2,3,4,15,u,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r8), %zmm4
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%r9), %zmm1
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm3
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm16
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm4, %zmm1, %zmm17
; AVX512DQ-SLOW-NEXT: movb $12, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k4
; AVX512DQ-SLOW-NEXT: vmovdqa 448(%rdx), %xmm4
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm2 {%k4}
; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm2, %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,1,2,3,4,8,u,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm2, %zmm4
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,3,9,u,6,7>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm12, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm8, %zmm0, %zmm12
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm6
; AVX512DQ-SLOW-NEXT: vmovdqa64 448(%rax), %zmm2
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,10,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm7, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,12,3,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm5, %zmm0
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 384(%rax), %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm0, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%r9), %ymm7
; AVX512DQ-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%r8), %ymm0
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm7[2,3,2,3],zmm5[2,3,2,3]
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm7, %zmm16
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, %zmm14
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm7, %zmm17
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [0,1,2,3,4,5,8,7]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [14,1,2,3,4,5,6,15]
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm5, %zmm4, %zmm18
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,2,3,4,9,6,7]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm1, %zmm5
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
; AVX512DQ-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm6
; AVX512DQ-SLOW-NEXT: movb $8, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k5}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k5}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k5}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k5}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k5}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k5}
; AVX512DQ-SLOW-NEXT: movb $-31, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 128(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 192(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm5 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 256(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm30 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 320(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm25 {%k4}
; AVX512DQ-SLOW-NEXT: vmovdqa 384(%rdx), %xmm1
; AVX512DQ-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm21 {%k4}
; AVX512DQ-SLOW-NEXT: movb $112, %sil
; AVX512DQ-SLOW-NEXT: kmovw %esi, %k2
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm19, %zmm7 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm1, %zmm2 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 128(%rax), %zmm10, %zmm4 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 192(%rax), %zmm1, %zmm5 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 256(%rax), %zmm1, %zmm30 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 320(%rax), %zmm1, %zmm25 {%k2}
; AVX512DQ-SLOW-NEXT: vinserti64x2 $3, 384(%rax), %zmm3, %zmm21 {%k2}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm5 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 264(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 328(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm26 {%k3}
; AVX512DQ-SLOW-NEXT: vpbroadcastq 392(%rcx), %ymm1
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm22 {%k3}
; AVX512DQ-SLOW-NEXT: movb $56, %cl
; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k2
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm7 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm26 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm22 {%k2}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm28 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
; AVX512DQ-SLOW-NEXT: movb $120, %cl
; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm15 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm15 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm27 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm12 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, %zmm10 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm31 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, %zmm14 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, %zmm23
; AVX512DQ-SLOW-NEXT: movb $-61, %cl
; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm14 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm14 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, %zmm14 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm11 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm11 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, %zmm11 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm4 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm4 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm5 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, %zmm5 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm7 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm7 = zmm1[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, %zmm7 {%k1}
; AVX512DQ-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm8 # 64-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # zmm8 = zmm13[0,1,2,3],mem[4,5,6,7]
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, %zmm8 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: movb $14, %cl
; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm13 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm17 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm28 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm3 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm2 {%k1}
; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm1 {%k1}
; AVX512DQ-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX512DQ-SLOW-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
; AVX512DQ-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k1}
; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 3008(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 2944(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, 2880(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, 2752(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm24, 2624(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 2560(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm31, 2496(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 2432(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm26, 2304(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, 2176(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 2112(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 2048(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 1984(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 1792(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1728(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 1664(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 1600(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 1536(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
; AVX512DQ-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm11, 1216(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm27, 1152(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, 768(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 704(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 640(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 512(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 320(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 256(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, (%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 3520(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3264(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3200(%rax)
; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, 3072(%rax)
; AVX512DQ-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
; AVX512DQ-SLOW-NEXT: vmovaps %zmm0, 3136(%rax)
; AVX512DQ-SLOW-NEXT: addq $6472, %rsp # imm = 0x1948
; AVX512DQ-SLOW-NEXT: vzeroupper
; AVX512DQ-SLOW-NEXT: retq
; AVX512DQ-FAST-LABEL: store_i64_stride7_vf64:
; AVX512DQ-FAST: # %bb.0:
; AVX512DQ-FAST-NEXT: subq $6568, %rsp # imm = 0x19A8
; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdi), %zmm11
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rsi), %zmm26
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rsi), %zmm22
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rdx), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rcx), %zmm9
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rcx), %zmm19
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [2,10,0,3,2,10,0,3]
; AVX512DQ-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: movb $96, %r10b
; AVX512DQ-FAST-NEXT: kmovw %r10d, %k1
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r8), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 (%r9), %zmm8
; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rax), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm0, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm16
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm0
; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %ymm9
; AVX512DQ-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm6
; AVX512DQ-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %ymm30
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm6[0],ymm0[0],ymm6[2],ymm0[2]
; AVX512DQ-FAST-NEXT: movb $28, %r10b
; AVX512DQ-FAST-NEXT: kmovw %r10d, %k2
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm3[2,3,2,3]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm6
; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm20
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm17
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm21
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
; AVX512DQ-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm18
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [15,7,15,7,15,7,15,7]
; AVX512DQ-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm8, %zmm1
; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [6,13,14,7,6,13,14,7]
; AVX512DQ-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm7
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm30[0],ymm9[0],ymm30[2],ymm9[2]
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm4[2,3,2,3]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 64(%r9), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm21, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rdx), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rcx), %zmm27
; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm12, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%rax), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa 128(%r9), %ymm0
; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %ymm24
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm24[0],ymm0[0],ymm24[2],ymm0[2]
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm2[2,3,2,3],zmm5[2,3,2,3]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 128(%r9), %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm16, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm20, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm17, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rsi), %zmm16
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm10, %zmm2
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rdx), %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rcx), %zmm15
; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%rax), %zmm23
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %ymm25
; AVX512DQ-FAST-NEXT: vmovdqa 192(%r8), %ymm11
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm11[0],ymm25[0],ymm11[2],ymm25[2]
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 {%k2} = zmm3[2,3,2,3],zmm23[2,3,2,3]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 192(%r9), %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm4, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm13, %zmm5
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm0
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm5
; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm20, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm6
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm10
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm21, %zmm4
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm18, %zmm4
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm3
; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm23
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdi), %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rsi), %zmm31
; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm1, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm22
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rdx), %zmm28
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rcx), %zmm12
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm13
; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm7, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm13 {%k1}
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%rax), %zmm23
; AVX512DQ-FAST-NEXT: vmovdqa 256(%r9), %ymm9
; AVX512DQ-FAST-NEXT: vmovdqa 256(%r8), %ymm5
; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm5[0],ymm9[0],ymm5[2],ymm9[2]
; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 {%k2} = zmm4[2,3,2,3],zmm23[2,3,2,3]
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r8), %zmm4
; AVX512DQ-FAST-NEXT: vmovdqa64 256(%r9), %zmm1
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm13
; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm13
; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15712 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
15713 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15714 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
15715 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15716 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
15717 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm20, %zmm0
15718 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm0
15719 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15720 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
15721 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15722 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
15723 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15724 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm10
15725 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm29
15726 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15727 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm18, %zmm10
15728 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15729 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15730 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15731 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm8, %zmm4
15732 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm23
15733 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15734 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdi), %zmm23
15735 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rsi), %zmm20
15736 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm4
15737 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm4
15738 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rdx), %zmm0
15739 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15740 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rcx), %zmm13
15741 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm7, %zmm0
15742 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
15743 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%rax), %zmm21
15744 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%r9), %ymm4
15745 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%r8), %ymm2
15746 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm2[0],ymm4[0],ymm2[2],ymm4[2]
15747 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 {%k2} = zmm10[2,3,2,3],zmm21[2,3,2,3]
15748 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15749 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r8), %zmm10
15750 ; AVX512DQ-FAST-NEXT: vmovdqa64 320(%r9), %zmm22
15751 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
15752 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
15753 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm0
15754 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15755 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
15756 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
15757 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
15758 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15759 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm0
15760 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm29, %zmm0
15761 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm21, %zmm18, %zmm0
15762 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15763 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15764 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm8, %zmm10
15765 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm14, %zmm21
15766 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15767 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,7,7]
15768 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
15769 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
15770 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15771 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm30 # 32-byte Folded Reload
15772 ; AVX512DQ-FAST-NEXT: vmovdqu64 %ymm30, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15773 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm24 # 32-byte Folded Reload
15774 ; AVX512DQ-FAST-NEXT: vmovdqu64 %ymm24, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15775 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm25, %ymm0, %ymm11
15776 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15777 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm9, %ymm0, %ymm5
15778 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15779 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm4, %ymm0, %ymm2
15780 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15781 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%r9), %ymm1
15782 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%r8), %ymm2
15783 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
15784 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15785 ; AVX512DQ-FAST-NEXT: vpermt2q %ymm1, %ymm0, %ymm2
15786 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
15787 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [3,0,12,4,3,0,12,4]
15788 ; AVX512DQ-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
15789 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
15790 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm1
15791 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15792 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
15793 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [13,5,13,5,13,5,13,5]
15794 ; AVX512DQ-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15795 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
15796 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm21, %zmm2
15797 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15798 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [6,14,6,14,6,14,6,14]
15799 ; AVX512DQ-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
15800 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
15801 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm3, %zmm2
15802 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15803 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm8, %zmm0
15804 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15805 ; AVX512DQ-FAST-NEXT: movb $48, %r10b
15806 ; AVX512DQ-FAST-NEXT: kmovw %r10d, %k3
15807 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
15808 ; AVX512DQ-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
15809 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15810 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
15811 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
15812 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15813 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,0,10,2,1,0,10,2]
15814 ; AVX512DQ-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
15815 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, %zmm2
15816 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
15817 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15818 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm26[0],zmm0[2],zmm26[2],zmm0[4],zmm26[4],zmm0[6],zmm26[6]
15819 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15820 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15821 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm21, %zmm1
15822 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15823 ; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [6,14,6,14]
15824 ; AVX512DQ-FAST-NEXT: # ymm30 = mem[0,1,0,1]
15825 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15826 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm30, %zmm1
15827 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15828 ; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm18 = [15,7,15,7]
15829 ; AVX512DQ-FAST-NEXT: # ymm18 = mem[0,1,0,1]
15830 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm0
15831 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15832 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15833 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15834 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
15835 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
15836 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
15837 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm21, %zmm2
15838 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15839 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
15840 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm2
15841 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15842 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm4
15843 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15844 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
15845 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
15846 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15847 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
15848 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15849 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
15850 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm7, %zmm0
15851 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15852 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm4[0],zmm2[0],zmm4[2],zmm2[2],zmm4[4],zmm2[4],zmm4[6],zmm2[6]
15853 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15854 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
15855 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm0
15856 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15857 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
15858 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm0
15859 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15860 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm18, %zmm4
15861 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15862 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
15863 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15864 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm0
15865 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15866 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm21, %zmm1
15867 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15868 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15869 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm1
15870 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15871 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm27, %zmm8, %zmm2
15872 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15873 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15874 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15875 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm1
15876 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15877 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
15878 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
15879 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15880 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm19[0],zmm2[2],zmm19[2],zmm2[4],zmm19[4],zmm2[6],zmm19[6]
15881 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15882 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
15883 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
15884 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15885 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
15886 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm30, %zmm0
15887 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15888 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm2
15889 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15890 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
15891 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15892 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm0
15893 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15894 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm1
15895 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15896 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15897 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm3, %zmm1
15898 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15899 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm8, %zmm2
15900 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15901 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
15902 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
15903 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm11, %zmm1
15904 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15905 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm17
15906 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm17
15907 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm16[0],zmm2[2],zmm16[2],zmm2[4],zmm16[4],zmm2[6],zmm16[6]
15908 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15909 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
15910 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm0
15911 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15912 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
15913 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm30, %zmm0
15914 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15915 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm16, %zmm18, %zmm2
15916 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15917 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
15918 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm28, %zmm29, %zmm0
15919 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15920 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
15921 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm21, %zmm0
15922 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15923 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
15924 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm3, %zmm0
15925 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15926 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm12, %zmm8, %zmm28
15927 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15928 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15929 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
15930 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm11, %zmm2
15931 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15932 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
15933 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
15934 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm19
15935 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm31[0],zmm0[2],zmm31[2],zmm0[4],zmm31[4],zmm0[6],zmm31[6]
15936 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15937 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15938 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm21, %zmm1
15939 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15940 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
15941 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm30, %zmm1
15942 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15943 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm31, %zmm18, %zmm0
15944 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15945 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm0
15946 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
15947 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm15, %zmm29, %zmm0
15948 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm1
15949 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm1
15950 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15951 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm1
15952 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm3, %zmm1
15953 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15954 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm13, %zmm8, %zmm15
15955 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm1
15956 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm11, %zmm1
15957 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15958 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm28
15959 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm28
15960 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm23[0],zmm20[0],zmm23[2],zmm20[2],zmm23[4],zmm20[4],zmm23[6],zmm20[6]
15961 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15962 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
15963 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
15964 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15965 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
15966 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm0
15967 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15968 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm18, %zmm23
15969 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, %zmm26
15970 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdx), %zmm20
15971 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rcx), %zmm5
15972 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm13
15973 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15974 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm0, %zmm13
15975 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm10
15976 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm10
15977 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm9
15978 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm9
15979 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm16
15980 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm16
15981 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm20
15982 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdx), %zmm1
15983 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rcx), %zmm4
15984 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm1, %zmm0
15985 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15986 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm4, %zmm29
15987 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm1, %zmm3
15988 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15989 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
15990 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
15991 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm0
15992 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
15993 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rdi), %zmm12
15994 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rsi), %zmm4
15995 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm8
15996 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
15997 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm8
15998 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
15999 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
16000 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, %zmm2
16001 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm2
16002 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rdi), %zmm6
16003 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rsi), %zmm5
16004 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, %zmm25
16005 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm25
16006 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm24
16007 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm7, %zmm24
16008 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm0
16009 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16010 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm6[0],zmm5[0],zmm6[2],zmm5[2],zmm6[4],zmm5[4],zmm6[6],zmm5[6]
16011 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm21
16012 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm30
16013 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm6
16014 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k3} = zmm12[0],zmm4[0],zmm12[2],zmm4[2],zmm12[4],zmm4[4],zmm12[6],zmm4[6]
16015 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm12, %zmm11
16016 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm12, %zmm4, %zmm7
16017 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm12
16018 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm13 {%k1}
16019 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm10[4,5,6,7]
16020 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r8), %zmm5
16021 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,11,u,u,4,5,6,7>
16022 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm9, %zmm4
16023 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%r9), %zmm8
16024 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,1,11,u,4,5,6,7>
16025 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm4, %zmm14
16026 ; AVX512DQ-FAST-NEXT: movb $4, %sil
16027 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
16028 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm13 {%k3}
16029 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,1,2,10,u,5,6,7>
16030 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm13, %zmm4
16031 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm2 {%k1}
16032 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <12,u,u,3,4,5,6,13>
16033 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm10
16034 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = <0,12,u,3,4,5,6,7>
16035 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm10, %zmm18
16036 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [0,8,0,8,0,8,0,8]
16037 ; AVX512DQ-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16038 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16039 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
16040 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16041 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16042 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
16043 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16044 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16045 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
16046 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16047 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16048 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
16049 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16050 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
16051 ; AVX512DQ-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm23 # 64-byte Folded Reload
16052 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
16053 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm27
16054 ; AVX512DQ-FAST-NEXT: movb $24, %sil
16055 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k4
16056 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm6 {%k4}
16057 ; AVX512DQ-FAST-NEXT: movb $6, %sil
16058 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k5
16059 ; AVX512DQ-FAST-NEXT: vpbroadcastq 456(%rcx), %ymm0
16060 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
16061 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k5}
16062 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,9,u,u,6,7>
16063 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm7, %zmm1
16064 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm12 {%k4}
16065 ; AVX512DQ-FAST-NEXT: movb $64, %sil
16066 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
16067 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm3 {%k3}
16068 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,15,u,u>
16069 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm12
16070 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r8), %zmm5
16071 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm6
16072 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%r9), %zmm2
16073 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm10
16074 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16075 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm16
16076 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16077 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm13
16078 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16079 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm0
16080 ; AVX512DQ-FAST-NEXT: movb $12, %sil
16081 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k3
16082 ; AVX512DQ-FAST-NEXT: vmovdqa 448(%rdx), %xmm5
16083 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
16084 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
16085 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k3}
16086 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm11, %zmm5
16087 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,3,4,8,u,7>
16088 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm5, %zmm7
16089 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,5,15,u>
16090 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm6
16091 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,1,2,3,9,u,6,7>
16092 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm1, %zmm11
16093 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <13,u,2,3,4,5,6,14>
16094 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm8, %zmm3, %zmm2
16095 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
16096 ; AVX512DQ-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm1
16097 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16098 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
16099 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm9 {%k5}
16100 ; AVX512DQ-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm1
16101 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16102 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
16103 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm31 {%k5}
16104 ; AVX512DQ-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm1
16105 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16106 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16107 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k5}
16108 ; AVX512DQ-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm1
16109 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16110 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
16111 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k5}
16112 ; AVX512DQ-FAST-NEXT: vpbroadcastq 264(%rcx), %ymm1
16113 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16114 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm20
16115 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm20 {%k5}
16116 ; AVX512DQ-FAST-NEXT: vpbroadcastq 328(%rcx), %ymm1
16117 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16118 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm28 {%k5}
16119 ; AVX512DQ-FAST-NEXT: vpbroadcastq 392(%rcx), %ymm1
16120 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
16121 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm24 {%k5}
16122 ; AVX512DQ-FAST-NEXT: vmovdqa64 448(%rax), %zmm5
16123 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,10,5,6,7]
16124 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm1
16125 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16126 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,12,3,4,5,6,7]
16127 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm18, %zmm1
16128 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16129 ; AVX512DQ-FAST-NEXT: vmovdqa64 384(%rax), %zmm1
16130 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16131 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm16
16132 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, %zmm4
16133 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16134 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
16135 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm16 {%k1}
16136 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
16137 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k2} = zmm17[2,3,2,3],zmm1[2,3,2,3]
16138 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16139 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16140 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm13
16141 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, %zmm18
16142 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16143 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm0
16144 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16145 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,5,8,7]
16146 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm7, %zmm0
16147 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16148 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = [14,1,2,3,4,5,6,15]
16149 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
16150 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,9,6,7]
16151 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm11, %zmm0
16152 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16153 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,13,2,3,4,5,6,7]
16154 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm0
16155 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16156 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm7, %zmm12
16157 ; AVX512DQ-FAST-NEXT: movb $8, %sil
16158 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
16159 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k2}
16160 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16161 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16162 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16163 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
16164 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16165 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16166 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
16167 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
16168 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16169 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
16170 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16171 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16172 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k4}
16173 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
16174 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16175 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm11 {%k4}
16176 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, %zmm26 {%k4}
16177 ; AVX512DQ-FAST-NEXT: movb $-31, %sil
16178 ; AVX512DQ-FAST-NEXT: kmovw %esi, %k2
16179 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16180 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
16181 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16182 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16183 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
16184 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16185 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16186 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
16187 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16188 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16189 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
16190 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16191 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16192 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
16193 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16194 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16195 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
16196 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16197 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm0
16198 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16199 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16200 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16201 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k3}
16202 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
16203 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16204 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16205 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16206 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
16207 ; AVX512DQ-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
16208 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16209 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16210 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16211 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
16212 ; AVX512DQ-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
16213 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16214 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16215 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16216 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
16217 ; AVX512DQ-FAST-NEXT: vmovdqa 256(%rdx), %xmm0
16218 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16219 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16220 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
16221 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k3}
16222 ; AVX512DQ-FAST-NEXT: vmovdqa 320(%rdx), %xmm0
16223 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16224 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16225 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
16226 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm11 {%k3}
16227 ; AVX512DQ-FAST-NEXT: vmovdqa 384(%rdx), %xmm0
16228 ; AVX512DQ-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
16229 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
16230 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k3}
16231 ; AVX512DQ-FAST-NEXT: movb $112, %cl
16232 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k2
16233 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16234 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm0, %zmm13 {%k2}
16235 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16236 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16237 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm0, %zmm1 {%k2}
16238 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16239 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16240 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 128(%rax), %zmm0, %zmm2 {%k2}
16241 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16242 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16243 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 192(%rax), %zmm0, %zmm5 {%k2}
16244 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16245 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 256(%rax), %zmm23, %zmm7 {%k2}
16246 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16247 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 320(%rax), %zmm27, %zmm11 {%k2}
16248 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, %zmm27
16249 ; AVX512DQ-FAST-NEXT: vinserti64x2 $3, 384(%rax), %zmm10, %zmm25 {%k2}
16250 ; AVX512DQ-FAST-NEXT: movb $56, %cl
16251 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k2
16252 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16253 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
16254 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16255 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16256 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
16257 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16258 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
16259 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16260 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16261 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
16262 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16263 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16264 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
16265 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, %zmm26
16266 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16267 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
16268 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm24 {%k2}
16269 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16270 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16271 ; AVX512DQ-FAST-NEXT: movb $14, %cl
16272 ; AVX512DQ-FAST-NEXT: kmovw %ecx, %k2
16273 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
16274 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm23 {%k2}
16275 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16276 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16277 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
16278 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k2}
16279 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16280 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16281 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
16282 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k2}
16283 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16284 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16285 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
16286 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm16 {%k2}
16287 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16288 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16289 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
16290 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm14 {%k2}
16291 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16292 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16293 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
16294 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k2}
16295 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
16296 ; AVX512DQ-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
16297 ; AVX512DQ-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k2}
16298 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16299 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16300 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
16301 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16302 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16303 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
16304 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16305 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16306 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
16307 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16308 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16309 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
16310 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16311 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
16312 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
16313 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16314 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16315 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
16316 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16317 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
16318 ; AVX512DQ-FAST-NEXT: movb $120, %al
16319 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
16320 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
16321 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, %zmm19 {%k1}
16322 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16323 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
16324 ; AVX512DQ-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
16325 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
16326 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
16327 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
16328 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm13 {%k1}
16329 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16330 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm3 {%k1}
16331 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16332 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
16333 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16334 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k1}
16335 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
16336 ; AVX512DQ-FAST-NEXT: movb $-61, %al
16337 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
16338 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16339 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm17 {%k1}
16340 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16341 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 64-byte Folded Reload
16342 ; AVX512DQ-FAST-NEXT: # zmm5 = zmm2[0,1,2,3],mem[4,5,6,7]
16343 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16344 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
16345 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16346 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 64-byte Folded Reload
16347 ; AVX512DQ-FAST-NEXT: # zmm7 = zmm2[0,1,2,3],mem[4,5,6,7]
16348 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16349 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
16350 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16351 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm8 # 64-byte Folded Reload
16352 ; AVX512DQ-FAST-NEXT: # zmm8 = zmm2[0,1,2,3],mem[4,5,6,7]
16353 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16354 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
16355 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16356 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm9 # 64-byte Folded Reload
16357 ; AVX512DQ-FAST-NEXT: # zmm9 = zmm2[0,1,2,3],mem[4,5,6,7]
16358 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16359 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm9 {%k1}
16360 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16361 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm10 # 64-byte Folded Reload
16362 ; AVX512DQ-FAST-NEXT: # zmm10 = zmm2[0,1,2,3],mem[4,5,6,7]
16363 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16364 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm10 {%k1}
16365 ; AVX512DQ-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm30, %zmm11 # 64-byte Folded Reload
16366 ; AVX512DQ-FAST-NEXT: # zmm11 = zmm30[0,1,2,3],mem[4,5,6,7]
16367 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16368 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
16369 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
16370 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm11, 3008(%rax)
16371 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, 2944(%rax)
16372 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm29, 2880(%rax)
16373 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16374 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 2816(%rax)
16375 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, 2752(%rax)
16376 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm25, 2688(%rax)
16377 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16378 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 2624(%rax)
16379 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 2560(%rax)
16380 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 2496(%rax)
16381 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 2432(%rax)
16382 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16383 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 2368(%rax)
16384 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm28, 2304(%rax)
16385 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm27, 2240(%rax)
16386 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16387 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 2176(%rax)
16388 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm9, 2112(%rax)
16389 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 2048(%rax)
16390 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 1984(%rax)
16391 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16392 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 1920(%rax)
16393 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, 1856(%rax)
16394 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16395 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
16396 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16397 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1728(%rax)
16398 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, 1664(%rax)
16399 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 1600(%rax)
16400 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 1536(%rax)
16401 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16402 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 1472(%rax)
16403 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16404 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
16405 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16406 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
16407 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16408 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
16409 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
16410 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 1152(%rax)
16411 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, 1088(%rax)
16412 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16413 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 1024(%rax)
16414 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16415 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 960(%rax)
16416 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16417 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 896(%rax)
16418 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16419 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 832(%rax)
16420 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
16421 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm15, 704(%rax)
16422 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, 640(%rax)
16423 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
16424 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 576(%rax)
16425 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm31, 512(%rax)
16426 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16427 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 448(%rax)
16428 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16429 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 384(%rax)
16430 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 320(%rax)
16431 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, 256(%rax)
16432 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, 192(%rax)
16433 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16434 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 128(%rax)
16435 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16436 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 64(%rax)
16437 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16438 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, (%rax)
16439 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 3520(%rax)
16440 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16441 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
16442 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16443 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
16444 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16445 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
16446 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16447 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3264(%rax)
16448 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16449 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3200(%rax)
16450 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 3072(%rax)
16451 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16452 ; AVX512DQ-FAST-NEXT: vmovaps %zmm0, 3136(%rax)
16453 ; AVX512DQ-FAST-NEXT: addq $6568, %rsp # imm = 0x19A8
16454 ; AVX512DQ-FAST-NEXT: vzeroupper
16455 ; AVX512DQ-FAST-NEXT: retq
16457 ; AVX512BW-ONLY-SLOW-LABEL: store_i64_stride7_vf64:
16458 ; AVX512BW-ONLY-SLOW: # %bb.0:
16459 ; AVX512BW-ONLY-SLOW-NEXT: subq $6600, %rsp # imm = 0x19C8
16460 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
16461 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %zmm3
16462 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16463 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm29
16464 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %zmm4
16465 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16466 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm13
16467 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, (%rsp) # 64-byte Spill
16468 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm20
16469 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16470 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm5
16471 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16472 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm8
16473 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm18
16474 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16475 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
16476 ; AVX512BW-ONLY-SLOW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16477 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [2,10,0,3,2,10,0,3]
16478 ; AVX512BW-ONLY-SLOW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
16479 ; AVX512BW-ONLY-SLOW-NEXT: movb $96, %r10b
16480 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k1
16481 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
16482 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm11
16483 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm6
16484 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%rax), %zmm9
16485 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16486 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
16487 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16488 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16489 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm0, %zmm2
16490 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14
16491 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
16492 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16493 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
16494 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm12
16495 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16496 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
16497 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm10, %zmm0
16498 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
16499 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm28, %zmm2
16500 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
16501 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm5
16502 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16503 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm3
16504 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16505 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm0
16506 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16507 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm4
16508 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16509 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
16510 ; AVX512BW-ONLY-SLOW-NEXT: movb $28, %r10b
16511 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k2
16512 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm6[2,3,2,3]
16513 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16514 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
16515 ; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16516 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
16517 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5
16518 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm11, %zmm2, %zmm0
16519 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
16520 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
16521 ; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16522 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm2, %zmm0
16523 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm11
16524 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16525 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
16526 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16527 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
16528 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm17
16529 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16530 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
16531 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm19
16532 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
16533 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16534 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
16535 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15
16536 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16537 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
16538 ; AVX512BW-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16539 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm5, %zmm1
16540 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [6,13,14,7,6,13,14,7]
16541 ; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
16542 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
16543 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16544 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm1
16545 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm10, %zmm1
16546 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, %zmm2
16547 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm18, %zmm28, %zmm2
16548 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
16549 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
16550 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm9[2,3,2,3]
16551 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16552 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
16553 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 64(%r9), %zmm3
16554 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16555 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm18
16556 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
16557 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm4
16558 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
16559 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16560 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16561 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
16562 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm11, %zmm2
16563 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16564 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
16565 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16566 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
16567 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
16568 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16569 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16570 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
16571 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm9
16572 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16573 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm23
16574 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm24
16575 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm1
16576 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm10, %zmm1
16577 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, %zmm30
16578 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm25
16579 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm12
16580 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm2
16581 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm28, %zmm2
16582 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
16583 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%rax), %zmm9
16584 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%r9), %ymm6
16585 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16586 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%r8), %ymm1
16587 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16588 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[2]
16589 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm9[2,3,2,3]
16590 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16591 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r8), %zmm1
16592 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 128(%r9), %zmm21
16593 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16594 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm14, %zmm2
16595 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm4, %zmm2
16596 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16597 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm10
16598 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16599 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm3
16600 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm16, %zmm2
16601 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm11, %zmm2
16602 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16603 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm2
16604 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16605 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
16606 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
16607 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16608 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16609 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm5, %zmm1
16610 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm9
16611 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16612 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm27
16613 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm26
16614 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm1
16615 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm30, %zmm1
16616 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm17
16617 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm14
16618 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm2
16619 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm28, %zmm2
16620 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
16621 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%rax), %zmm21
16622 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%r9), %ymm9
16623 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16624 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%r8), %ymm1
16625 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16626 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm9[0],ymm1[2],ymm9[2]
16627 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm21[2,3,2,3]
16628 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16629 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r8), %zmm1
16630 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 192(%r9), %zmm0
16631 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16632 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm18, %zmm2
16633 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm4, %zmm2
16634 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16635 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
16636 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm16, %zmm2
16637 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm11, %zmm2
16638 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16639 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2
16640 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16641 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
16642 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm15, %zmm2
16643 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16644 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm31
16645 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16646 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm1
16647 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm7, %zmm21
16648 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16649 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm21
16650 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm22
16651 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm2
16652 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16653 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm30, %zmm2
16654 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm16
16655 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm15
16656 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm4
16657 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm28, %zmm4
16658 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
16659 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%rax), %zmm0
16660 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%r9), %ymm6
16661 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16662 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%r8), %ymm2
16663 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16664 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm2[0],ymm6[0],ymm2[2],ymm6[2]
16665 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm6
16666 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm0[2,3,2,3]
16667 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16668 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r8), %zmm2
16669 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 256(%r9), %zmm13
16670 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
16671 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm18, %zmm4
16672 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16673 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm4
16674 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16675 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16676 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
16677 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
16678 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16679 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm4
16680 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm11, %zmm4
16681 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16682 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16683 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm4
16684 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16685 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16686 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm19, %zmm4
16687 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm4
16688 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16689 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16690 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16691 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm2
16692 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm7, %zmm6
16693 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16694 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm6
16695 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm2
16696 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16697 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
16698 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm4
16699 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm2
16700 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm13
16701 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
16702 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm28, %zmm9
16703 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
16704 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%rax), %zmm3
16705 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %ymm30
16706 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %ymm30, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16707 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%r8), %ymm4
16708 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
16709 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm30[0],ymm4[2],ymm30[2]
16710 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm30
16711 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm4[2,3,2,3],zmm3[2,3,2,3]
16712 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16713 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r8), %zmm4
16714 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 320(%r9), %zmm9
16715 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3
16716 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm18, %zmm3
16717 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm10, %zmm3
16718 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16719 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3
16720 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm3
16721 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm11, %zmm3
16722 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16723 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
16724 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16725 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm19, %zmm0
16726 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm30, %zmm31, %zmm0
16727 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16728 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16729 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm9, %zmm5, %zmm4
16730 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm4, %zmm7, %zmm30
16731 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16732 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm11 = [13,5,13,5,13,5,13,5]
16733 ; AVX512BW-ONLY-SLOW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16734 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16735 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
16736 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm11, %zmm0
16737 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16738 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
16739 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16740 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3
16741 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm0, %zmm3
16742 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16743 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
16744 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
16745 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm5, %zmm0
16746 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16747 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
16748 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
16749 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
16750 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm9
16751 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
16752 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
16753 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
16754 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm11, %zmm4
16755 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16756 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
16757 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm3, %zmm4
16758 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16759 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4
16760 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm8, %zmm5, %zmm4
16761 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16762 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm8
16763 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
16764 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
16765 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm11, %zmm1
16766 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16767 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
16768 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm3, %zmm1
16769 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16770 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm1
16771 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm1
16772 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16773 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm25, %zmm0, %zmm12
16774 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, %zmm30
16775 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
16776 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm11, %zmm1
16777 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16778 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
16779 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm3, %zmm1
16780 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16781 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1
16782 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
16783 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16784 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm0, %zmm14
16785 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm17
16786 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
16787 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm11, %zmm1
16788 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16789 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
16790 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm3, %zmm1
16791 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16792 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, %zmm1
16793 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm15, %zmm5, %zmm1
16794 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16795 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm16, %zmm0, %zmm15
16796 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm16
16797 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
16798 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm11, %zmm1
16799 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16800 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
16801 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm4
16802 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm3, %zmm1
16803 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16804 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
16805 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm13, %zmm5, %zmm1
16806 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16807 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm13
16808 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, %zmm18
16809 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm14
16810 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm1
16811 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm12
16812 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm28, %zmm12
16813 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm19
16814 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm3, %zmm19
16815 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm3
16816 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm20
16817 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm28
16818 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16819 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
16820 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm11, %zmm2
16821 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16822 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm4
16823 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16824 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm25
16825 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm20, %zmm5, %zmm25
16826 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm20
16827 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm7
16828 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm14, %zmm0, %zmm7
16829 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm15
16830 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm14
16831 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm11, %zmm15
16832 ; AVX512BW-ONLY-SLOW-NEXT: movb $48, %r10b
16833 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k3
16834 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [1,0,10,2,1,0,10,2]
16835 ; AVX512BW-ONLY-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
16836 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
16837 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
16838 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16839 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm2
16840 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16841 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k3} = zmm0[0],zmm3[0],zmm0[2],zmm3[2],zmm0[4],zmm3[4],zmm0[6],zmm3[6]
16842 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16843 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4
16844 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
16845 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm9
16846 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
16847 ; AVX512BW-ONLY-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
16848 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
16849 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16850 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm4
16851 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16852 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
16853 ; AVX512BW-ONLY-SLOW-NEXT: # ymm13 = mem[0,1,0,1]
16854 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm13, %zmm5
16855 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16856 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [15,7,15,7]
16857 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
16858 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm8, %zmm9
16859 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16860 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
16861 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
16862 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm29, %zmm1, %zmm3
16863 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16864 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm10 {%k3} = zmm29[0],zmm0[0],zmm29[2],zmm0[2],zmm29[4],zmm0[4],zmm29[6],zmm0[6]
16865 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16866 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm4
16867 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm3
16868 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, %zmm5
16869 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm29
16870 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16871 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm4
16872 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16873 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
16874 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16875 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm5
16876 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16877 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, %zmm0
16878 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm23, %zmm1, %zmm0
16879 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
16880 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm30 {%k3} = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
16881 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16882 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm3
16883 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm0
16884 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, %zmm4
16885 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm2, %zmm23
16886 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16887 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm11, %zmm3
16888 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16889 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm13, %zmm0
16890 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16891 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm24, %zmm8, %zmm4
16892 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16893 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, %zmm0
16894 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm27, %zmm1, %zmm0
16895 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16896 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm17 {%k3} = zmm27[0],zmm26[0],zmm27[2],zmm26[2],zmm27[4],zmm26[4],zmm27[6],zmm26[6]
16897 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16898 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm3
16899 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm0
16900 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, %zmm31
16901 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm2, %zmm27
16902 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16903 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm11, %zmm3
16904 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16905 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm13, %zmm0
16906 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16907 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm26, %zmm8, %zmm31
16908 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, %zmm30
16909 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm30
16910 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm16 {%k3} = zmm21[0],zmm22[0],zmm21[2],zmm22[2],zmm21[4],zmm22[4],zmm21[6],zmm22[6]
16911 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16912 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm3
16913 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
16914 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm27
16915 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm29
16916 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm2, %zmm29
16917 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm11, %zmm3
16918 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16919 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm13, %zmm0
16920 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16921 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm22, %zmm8, %zmm27
16922 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
16923 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26
16924 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm26
16925 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm18 {%k3} = zmm6[0],zmm0[0],zmm6[2],zmm0[2],zmm6[4],zmm0[4],zmm6[6],zmm0[6]
16926 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16927 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm28
16928 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm3
16929 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm23
16930 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm24
16931 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm2, %zmm24
16932 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm28
16933 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
16934 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16935 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm23
16936 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm9
16937 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm0
16938 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm6
16939 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16940 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm5, %zmm6
16941 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm3
16942 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm3
16943 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4
16944 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm11, %zmm4
16945 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm21
16946 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm17
16947 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
16948 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm22
16949 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm5
16950 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16951 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm11
16952 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm13
16953 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm21[0],zmm17[0],zmm21[2],zmm17[2],zmm21[4],zmm17[4],zmm21[6],zmm17[6]
16954 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, %zmm18
16955 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm21
16956 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm18
16957 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm7 {%k3} = zmm9[0],zmm0[0],zmm9[2],zmm0[2],zmm9[4],zmm0[4],zmm9[6],zmm0[6]
16958 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm0, %zmm9, %zmm2
16959 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm9, %zmm0, %zmm1
16960 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm9
16961 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm12 {%k1}
16962 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm19[4,5,6,7]
16963 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r8), %zmm6
16964 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,11,u,u,4,5,6,7>
16965 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm7, %zmm0
16966 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%r9), %zmm7
16967 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,1,11,u,4,5,6,7>
16968 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm0, %zmm8
16969 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm10
16970 ; AVX512BW-ONLY-SLOW-NEXT: movb $4, %sil
16971 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
16972 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm12 {%k3}
16973 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,1,2,10,u,5,6,7>
16974 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm12, %zmm8
16975 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm4 {%k1}
16976 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <12,u,u,3,4,5,6,13>
16977 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm4, %zmm0
16978 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,12,u,3,4,5,6,7>
16979 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm0, %zmm4
16980 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,0,8,0,8,0,8]
16981 ; AVX512BW-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
16982 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
16983 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm19 # 64-byte Folded Reload
16984 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16985 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
16986 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16987 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16988 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
16989 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16990 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
16991 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm5 # 64-byte Folded Reload
16992 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16993 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
16994 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16995 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
16996 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
16997 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
16998 ; AVX512BW-ONLY-SLOW-NEXT: movb $24, %sil
16999 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k5
17000 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, %zmm18 {%k5}
17001 ; AVX512BW-ONLY-SLOW-NEXT: movb $6, %sil
17002 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k3
17003 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 456(%rcx), %ymm12
17004 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
17005 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm1 {%k3}
17006 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,1,2,9,u,u,6,7>
17007 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm1, %zmm12
17008 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm9 {%k5}
17009 ; AVX512BW-ONLY-SLOW-NEXT: movb $64, %sil
17010 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
17011 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm3 {%k4}
17012 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,1,2,3,4,15,u,u>
17013 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm9
17014 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r8), %zmm6
17015 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm6, %zmm1, %zmm18
17016 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%r9), %zmm1
17017 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm0
17018 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
17019 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm14
17020 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
17021 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm15
17022 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17023 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm6, %zmm1, %zmm16
17024 ; AVX512BW-ONLY-SLOW-NEXT: movb $12, %sil
17025 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k4
17026 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 448(%rdx), %xmm6
17027 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],mem[0]
17028 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
17029 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm6, %zmm0, %zmm2 {%k4}
17030 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm2, %zmm2
17031 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,2,3,4,8,u,7>
17032 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm2, %zmm6
17033 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
17034 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm18
17035 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,3,9,u,6,7>
17036 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm12, %zmm1
17037 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
17038 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm7, %zmm3, %zmm12
17039 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm7, %zmm2, %zmm9
17040 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 448(%rax), %zmm2
17041 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,10,5,6,7]
17042 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm8, %zmm3
17043 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17044 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,12,3,4,5,6,7]
17045 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm4, %zmm3
17046 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17047 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 384(%rax), %zmm3
17048 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17049 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm14
17050 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, %zmm7
17051 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
17052 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17053 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm8 {%k1}
17054 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%r9), %ymm14
17055 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17056 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%r8), %ymm4
17057 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17058 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm4[0],ymm14[0],ymm4[2],ymm14[2]
17059 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 {%k2} = zmm4[2,3,2,3],zmm3[2,3,2,3]
17060 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17061 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17062 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm15
17063 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, %zmm8
17064 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17065 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm16
17066 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17067 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,2,3,4,5,8,7]
17068 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm6, %zmm4
17069 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17070 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [14,1,2,3,4,5,6,15]
17071 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm3, %zmm4, %zmm18
17072 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,4,9,6,7]
17073 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm1, %zmm3
17074 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17075 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
17076 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm1
17077 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17078 ; AVX512BW-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm9
17079 ; AVX512BW-ONLY-SLOW-NEXT: movb $8, %sil
17080 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
17081 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm10 {%k2}
17082 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17083 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17084 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17085 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k5}
17086 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17087 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17088 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k5}
17089 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17090 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17091 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k5}
17092 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17093 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k5}
17094 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17095 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k5}
17096 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17097 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k5}
17098 ; AVX512BW-ONLY-SLOW-NEXT: movb $-31, %sil
17099 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
17100 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17101 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
17102 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17103 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17104 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
17105 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17106 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17107 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
17108 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17109 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17110 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k2}
17111 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17112 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17113 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm27 {%k2}
17114 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17115 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k2}
17116 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm1
17117 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17118 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17119 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17120 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k4}
17121 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm1
17122 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17123 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17124 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17125 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k4}
17126 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 128(%rdx), %xmm1
17127 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17128 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17129 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17130 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
17131 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 192(%rdx), %xmm1
17132 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17133 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17134 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
17135 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm6 {%k4}
17136 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 256(%rdx), %xmm1
17137 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17138 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17139 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm29 {%k4}
17140 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 320(%rdx), %xmm1
17141 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17142 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17143 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm24 {%k4}
17144 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 384(%rdx), %xmm1
17145 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
17146 ; AVX512BW-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
17147 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm21 {%k4}
17148 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, (%rax), %zmm19, %zmm1
17149 ; AVX512BW-ONLY-SLOW-NEXT: movb $112, %sil
17150 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %esi, %k2
17151 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
17152 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17153 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17154 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 64(%rax), %zmm1, %zmm1
17155 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
17156 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17157 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17158 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 128(%rax), %zmm1, %zmm1
17159 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
17160 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17161 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 192(%rax), %zmm5, %zmm1
17162 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm6 {%k2}
17163 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17164 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17165 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 256(%rax), %zmm1, %zmm1
17166 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k2}
17167 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17168 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 320(%rax), %zmm1, %zmm1
17169 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k2}
17170 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $3, 384(%rax), %zmm0, %zmm0
17171 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
17172 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm0
17173 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17174 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17175 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
17176 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm0
17177 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17178 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17179 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
17180 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm0
17181 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17182 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 (%rsp), %zmm3 # 64-byte Reload
17183 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k3}
17184 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm0
17185 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17186 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17187 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k3}
17188 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 264(%rcx), %ymm0
17189 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17190 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k3}
17191 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 328(%rcx), %ymm0
17192 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17193 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm26 {%k3}
17194 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq 392(%rcx), %ymm0
17195 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
17196 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k3}
17197 ; AVX512BW-ONLY-SLOW-NEXT: movb $56, %cl
17198 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
17199 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17200 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
17201 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17202 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17203 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
17204 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17205 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17206 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
17207 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
17208 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17209 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
17210 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17211 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17212 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
17213 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17214 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
17215 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm22 {%k2}
17216 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17217 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17218 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
17219 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17220 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17221 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
17222 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17223 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17224 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
17225 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17226 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17227 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k1}
17228 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17229 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
17230 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
17231 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17232 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm28 {%k1}
17233 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17234 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k1}
17235 ; AVX512BW-ONLY-SLOW-NEXT: movb $120, %cl
17236 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
17237 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
17238 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm16 {%k1}
17239 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17240 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm15 # 64-byte Folded Reload
17241 ; AVX512BW-ONLY-SLOW-NEXT: # zmm15 = zmm0[0,1,2,3],mem[4,5,6,7]
17242 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17243 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm17 {%k1}
17244 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
17245 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
17246 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
17247 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm25 {%k1}
17248 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
17249 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, %zmm10 {%k1}
17250 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
17251 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, %zmm31 {%k1}
17252 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, %zmm5
17253 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, %zmm5 {%k1}
17254 ; AVX512BW-ONLY-SLOW-NEXT: movb $-61, %cl
17255 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
17256 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17257 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, %zmm15 {%k1}
17258 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17259 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm14 # 64-byte Folded Reload
17260 ; AVX512BW-ONLY-SLOW-NEXT: # zmm14 = zmm0[0,1,2,3],mem[4,5,6,7]
17261 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17262 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
17263 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17264 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm12 # 64-byte Folded Reload
17265 ; AVX512BW-ONLY-SLOW-NEXT: # zmm12 = zmm0[0,1,2,3],mem[4,5,6,7]
17266 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17267 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, %zmm12 {%k1}
17268 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17269 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
17270 ; AVX512BW-ONLY-SLOW-NEXT: # zmm3 = zmm0[0,1,2,3],mem[4,5,6,7]
17271 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17272 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, %zmm3 {%k1}
17273 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17274 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 64-byte Folded Reload
17275 ; AVX512BW-ONLY-SLOW-NEXT: # zmm4 = zmm0[0,1,2,3],mem[4,5,6,7]
17276 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
17277 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k1}
17278 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17279 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
17280 ; AVX512BW-ONLY-SLOW-NEXT: # zmm6 = zmm0[0,1,2,3],mem[4,5,6,7]
17281 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
17282 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, %zmm6 {%k1}
17283 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm7 # 64-byte Folded Reload
17284 ; AVX512BW-ONLY-SLOW-NEXT: # zmm7 = zmm13[0,1,2,3],mem[4,5,6,7]
17285 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17286 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
17287 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17288 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17289 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17290 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17291 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17292 ; AVX512BW-ONLY-SLOW-NEXT: movb $14, %cl
17293 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k1
17294 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
17295 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm13 {%k1}
17296 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17297 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17298 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17299 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17300 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17301 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
17302 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm11 {%k1}
17303 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17304 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17305 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17306 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17307 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17308 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
17309 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm28 {%k1}
17310 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17311 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17312 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17313 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17314 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17315 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17316 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm2 {%k1}
17317 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17318 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17319 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17320 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17321 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17322 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17323 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm1 {%k1}
17324 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17325 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17326 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17327 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17328 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17329 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17330 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm0 {%k1}
17331 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
17332 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
17333 ; AVX512BW-ONLY-SLOW-NEXT: # ymm8 = ymm8[1],mem[1],ymm8[3],mem[3]
17334 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,3,3]
17335 ; AVX512BW-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],mem[6,7]
17336 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm20 {%k1}
17337 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
17338 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 3008(%rax)
17339 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 2944(%rax)
17340 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 2880(%rax)
17341 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
17342 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm7, 2816(%rax)
17343 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 2752(%rax)
17344 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
17345 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 2624(%rax)
17346 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 2560(%rax)
17347 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm31, 2496(%rax)
17348 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 2432(%rax)
17349 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
17350 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm6, 2368(%rax)
17351 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm26, 2304(%rax)
17352 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm24, 2240(%rax)
17353 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm27, 2176(%rax)
17354 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 2112(%rax)
17355 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 2048(%rax)
17356 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 1984(%rax)
17357 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
17358 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm4, 1920(%rax)
17359 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 1856(%rax)
17360 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, 1792(%rax)
17361 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17362 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1728(%rax)
17363 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 1664(%rax)
17364 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 1600(%rax)
17365 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 1536(%rax)
17366 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17367 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm3, 1472(%rax)
17368 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17369 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
17370 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17371 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
17372 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17373 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
17374 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 1216(%rax)
17375 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 1152(%rax)
17376 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
17377 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17378 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm2, 1024(%rax)
17379 ; AVX512BW-ONLY-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
17380 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
17381 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17382 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
17383 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17384 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
17385 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 768(%rax)
17386 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 704(%rax)
17387 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 640(%rax)
17388 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17389 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm1, 576(%rax)
17390 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17391 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 512(%rax)
17392 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17393 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
17394 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17395 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
17396 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 320(%rax)
17397 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 256(%rax)
17398 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
17399 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17400 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
17401 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17402 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
17403 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17404 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, (%rax)
17405 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 3520(%rax)
17406 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17407 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
17408 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17409 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
17410 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17411 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
17412 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17413 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3264(%rax)
17414 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17415 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3200(%rax)
17416 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 3072(%rax)
17417 ; AVX512BW-ONLY-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17418 ; AVX512BW-ONLY-SLOW-NEXT: vmovaps %zmm0, 3136(%rax)
17419 ; AVX512BW-ONLY-SLOW-NEXT: addq $6600, %rsp # imm = 0x19C8
17420 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
17421 ; AVX512BW-ONLY-SLOW-NEXT: retq
17423 ; AVX512BW-ONLY-FAST-LABEL: store_i64_stride7_vf64:
17424 ; AVX512BW-ONLY-FAST: # %bb.0:
17425 ; AVX512BW-ONLY-FAST-NEXT: subq $6696, %rsp # imm = 0x1A28
17426 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
17427 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
17428 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17429 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdi), %zmm22
17430 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17431 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rsi), %zmm23
17432 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rsi), %zmm17
17433 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17434 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rdx), %zmm24
17435 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17436 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rdx), %zmm6
17437 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17438 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rcx), %zmm7
17439 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17440 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rcx), %zmm9
17441 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17442 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
17443 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17444 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [2,10,0,3,2,10,0,3]
17445 ; AVX512BW-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
17446 ; AVX512BW-ONLY-FAST-NEXT: movb $96, %r10b
17447 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k1
17448 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r8), %zmm1
17449 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%r9), %zmm8
17450 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm3
17451 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm4
17452 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17453 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
17454 ; AVX512BW-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17455 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
17456 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm0, %zmm2
17457 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm14
17458 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [4,9,0,3,4,9,0,3]
17459 ; AVX512BW-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
17460 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm2
17461 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17462 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
17463 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm0
17464 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
17465 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm12, %zmm2
17466 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
17467 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm0
17468 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17469 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm11
17470 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17471 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm6
17472 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17473 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %ymm26
17474 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm6[0],ymm0[0],ymm6[2],ymm0[2]
17475 ; AVX512BW-ONLY-FAST-NEXT: movb $28, %r10b
17476 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k2
17477 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm3[2,3,2,3]
17478 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17479 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
17480 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17481 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
17482 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm6
17483 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm0
17484 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm7
17485 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
17486 ; AVX512BW-ONLY-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
17487 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
17488 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm18
17489 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17490 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [5,0,14,6,5,0,14,6]
17491 ; AVX512BW-ONLY-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
17492 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
17493 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17494 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
17495 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
17496 ; AVX512BW-ONLY-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
17497 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
17498 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm20
17499 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17500 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm15 = [15,7,15,7,15,7,15,7]
17501 ; AVX512BW-ONLY-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17502 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm15, %zmm1
17503 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [6,13,14,7,6,13,14,7]
17504 ; AVX512BW-ONLY-FAST-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
17505 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm3
17506 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17507 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, %zmm1
17508 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm17, %zmm10, %zmm1
17509 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm2
17510 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
17511 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
17512 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm26[0],ymm11[0],ymm26[2],ymm11[2]
17513 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm4[2,3,2,3]
17514 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17515 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
17516 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 64(%r9), %zmm3
17517 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
17518 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm2
17519 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17520 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm27, %zmm2
17521 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17522 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
17523 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm2
17524 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm2
17525 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17526 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
17527 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm8, %zmm2
17528 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm2
17529 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17530 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17531 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm15, %zmm1
17532 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm4
17533 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17534 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdi), %zmm1
17535 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17536 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
17537 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
17538 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rdx), %zmm3
17539 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17540 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rcx), %zmm24
17541 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm12, %zmm3
17542 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
17543 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%rax), %zmm6
17544 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%r9), %ymm1
17545 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17546 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%r8), %ymm4
17547 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
17548 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm25
17549 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm2[2,3,2,3],zmm6[2,3,2,3]
17550 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17551 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
17552 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 128(%r9), %zmm1
17553 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
17554 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm3
17555 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17556 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm27, %zmm3
17557 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17558 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
17559 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm4
17560 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm3
17561 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm18, %zmm3
17562 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17563 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm3
17564 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm8, %zmm3
17565 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm0, %zmm3
17566 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17567 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17568 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm2
17569 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm16, %zmm6
17570 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17571 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
17572 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17573 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rsi), %zmm11
17574 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm10, %zmm2
17575 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm28
17576 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rdx), %zmm10
17577 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17578 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rcx), %zmm1
17579 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17580 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm12, %zmm10
17581 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm5
17582 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm10 {%k1}
17583 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%rax), %zmm30
17584 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %ymm17
17585 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%r8), %ymm12
17586 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm12[0],ymm17[0],ymm12[2],ymm17[2]
17587 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm10 {%k2} = zmm3[2,3,2,3],zmm30[2,3,2,3]
17588 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17589 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
17590 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 192(%r9), %zmm0
17591 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm10
17592 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm14, %zmm10
17593 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm7
17594 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm27, %zmm10
17595 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17596 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm13
17597 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm4, %zmm13
17598 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
17599 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm6
17600 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm18, %zmm13
17601 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17602 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17603 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm4
17604 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm8, %zmm4
17605 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm30, %zmm20, %zmm4
17606 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17607 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17608 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm3
17609 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm16, %zmm30
17610 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17611 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdi), %zmm3
17612 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17613 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rsi), %zmm13
17614 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm28, %zmm3
17615 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm29
17616 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17617 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rdx), %zmm28
17618 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rcx), %zmm18
17619 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm14
17620 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm5, %zmm14
17621 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm30
17622 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17623 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm14 {%k1}
17624 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%rax), %zmm0
17625 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%r9), %ymm9
17626 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %ymm22
17627 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm22[0],ymm9[0],ymm22[2],ymm9[2]
17628 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm4[2,3,2,3],zmm0[2,3,2,3]
17629 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17630 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r8), %zmm4
17631 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 256(%r9), %zmm3
17632 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14
17633 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm14
17634 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm5
17635 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17636 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm27, %zmm14
17637 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17638 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17639 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm14
17640 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
17641 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17642 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm14
17643 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm6, %zmm14
17644 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17645 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17646 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm1
17647 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17648 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
17649 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm8, %zmm10
17650 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17651 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm20, %zmm10
17652 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17653 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm5
17654 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17655 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17656 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm4
17657 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm16, %zmm0
17658 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17659 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdi), %zmm4
17660 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17661 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rsi), %zmm20
17662 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm4
17663 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rdx), %zmm31
17664 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rcx), %zmm14
17665 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm21
17666 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm30, %zmm21
17667 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm21 {%k1}
17668 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%rax), %zmm1
17669 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%r9), %ymm4
17670 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%r8), %ymm2
17671 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm2[0],ymm4[0],ymm2[2],ymm4[2]
17672 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm21 {%k2} = zmm10[2,3,2,3],zmm1[2,3,2,3]
17673 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17674 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r8), %zmm10
17675 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 320(%r9), %zmm21
17676 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
17677 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm7, %zmm0
17678 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm27, %zmm0
17679 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17680 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
17681 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm3, %zmm0
17682 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm0
17683 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17684 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, %zmm0
17685 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm8, %zmm0
17686 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm0
17687 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17688 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17689 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm15, %zmm10
17690 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm16, %zmm1
17691 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17692 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,7,7]
17693 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
17694 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
17695 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17696 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm26 # 32-byte Folded Reload
17697 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm26, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17698 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm25 # 32-byte Folded Reload
17699 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm25, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17700 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm17, %ymm0, %ymm12
17701 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17702 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm9, %ymm0, %ymm22
17703 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %ymm22, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17704 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm4, %ymm0, %ymm2
17705 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17706 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%r9), %ymm1
17707 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%r8), %ymm2
17708 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
17709 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17710 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %ymm1, %ymm0, %ymm2
17711 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
17712 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [3,0,12,4,3,0,12,4]
17713 ; AVX512BW-ONLY-FAST-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
17714 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17715 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
17716 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17717 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm27, %zmm2
17718 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm12 = [13,5,13,5,13,5,13,5]
17719 ; AVX512BW-ONLY-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17720 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
17721 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm12, %zmm3
17722 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17723 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm30 = [6,14,6,14,6,14,6,14]
17724 ; AVX512BW-ONLY-FAST-NEXT: # zmm30 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17725 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
17726 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm30, %zmm3
17727 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17728 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm15, %zmm0
17729 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17730 ; AVX512BW-ONLY-FAST-NEXT: movb $48, %r10b
17731 ; AVX512BW-ONLY-FAST-NEXT: kmovd %r10d, %k3
17732 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,8,0,1,0,8,0,1]
17733 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
17734 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17735 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17736 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm1
17737 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17738 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,0,10,2,1,0,10,2]
17739 ; AVX512BW-ONLY-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
17740 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, %zmm9
17741 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm9
17742 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm2 {%k3} = zmm0[0],zmm23[0],zmm0[2],zmm23[2],zmm0[4],zmm23[4],zmm0[6],zmm23[6]
17743 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17744 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17745 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm12, %zmm1
17746 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17747 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm29 = [6,14,6,14]
17748 ; AVX512BW-ONLY-FAST-NEXT: # ymm29 = mem[0,1,2,3,0,1,2,3]
17749 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17750 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm29, %zmm1
17751 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17752 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm25 = [15,7,15,7]
17753 ; AVX512BW-ONLY-FAST-NEXT: # ymm25 = mem[0,1,2,3,0,1,2,3]
17754 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm23, %zmm25, %zmm0
17755 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17756 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17757 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17758 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17759 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm1
17760 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
17761 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm2
17762 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17763 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
17764 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm30, %zmm2
17765 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17766 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm15, %zmm3
17767 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17768 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17769 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
17770 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17771 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm10, %zmm2
17772 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17773 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
17774 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm7, %zmm2
17775 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17776 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm3[0],zmm0[0],zmm3[2],zmm0[2],zmm3[4],zmm0[4],zmm3[6],zmm0[6]
17777 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17778 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
17779 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm1
17780 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17781 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
17782 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
17783 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17784 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm3
17785 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17786 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, %zmm0
17787 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17788 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm0
17789 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
17790 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm12, %zmm1
17791 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17792 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
17793 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm30, %zmm1
17794 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17795 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm24, %zmm15, %zmm2
17796 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17797 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17798 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
17799 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
17800 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17801 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
17802 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
17803 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17804 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm19[0],zmm2[2],zmm19[2],zmm2[4],zmm19[4],zmm2[6],zmm19[6]
17805 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17806 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
17807 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm0
17808 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17809 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
17810 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm29, %zmm0
17811 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17812 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm19, %zmm25, %zmm2
17813 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17814 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17815 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
17816 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
17817 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm0
17818 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
17819 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm12, %zmm1
17820 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17821 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm1
17822 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm1
17823 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17824 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm15, %zmm3
17825 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17826 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
17827 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
17828 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm10, %zmm1
17829 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17830 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm1
17831 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
17832 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17833 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm11[0],zmm2[2],zmm11[2],zmm2[4],zmm11[4],zmm2[6],zmm11[6]
17834 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17835 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
17836 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm12, %zmm0
17837 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17838 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
17839 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm29, %zmm0
17840 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17841 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm11, %zmm25, %zmm2
17842 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17843 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm0
17844 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm28, %zmm27, %zmm0
17845 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17846 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
17847 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm12, %zmm0
17848 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17849 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
17850 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm30, %zmm0
17851 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17852 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm18, %zmm15, %zmm28
17853 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17854 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17855 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
17856 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm10, %zmm2
17857 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17858 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm2
17859 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
17860 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17861 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm13[0],zmm0[2],zmm13[2],zmm0[4],zmm13[4],zmm0[6],zmm13[6]
17862 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17863 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17864 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm12, %zmm1
17865 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17866 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17867 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm29, %zmm1
17868 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17869 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm25, %zmm0
17870 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17871 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, %zmm1
17872 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm31, %zmm27, %zmm1
17873 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
17874 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm12, %zmm2
17875 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17876 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
17877 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm30, %zmm2
17878 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17879 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm14, %zmm15, %zmm31
17880 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17881 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17882 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
17883 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm10, %zmm2
17884 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17885 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, %zmm28
17886 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm28
17887 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm20[0],zmm0[2],zmm20[2],zmm0[4],zmm20[4],zmm0[6],zmm20[6]
17888 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17889 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17890 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm12, %zmm1
17891 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17892 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
17893 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm1
17894 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17895 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm20, %zmm25, %zmm0
17896 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22
17897 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdx), %zmm13
17898 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rcx), %zmm5
17899 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm14
17900 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
17901 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm0, %zmm14
17902 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm16
17903 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm30, %zmm16
17904 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm20
17905 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm13, %zmm27, %zmm20
17906 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm19
17907 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm12, %zmm19
17908 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm15, %zmm13
17909 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdx), %zmm4
17910 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rcx), %zmm5
17911 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm0
17912 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17913 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm5, %zmm27
17914 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm30
17915 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
17916 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm15, %zmm4
17917 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm12, %zmm0
17918 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17919 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rdi), %zmm11
17920 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rsi), %zmm0
17921 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm8
17922 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17923 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm8
17924 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm2
17925 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm2
17926 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, %zmm3
17927 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm12, %zmm3
17928 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rdi), %zmm6
17929 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rsi), %zmm5
17930 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, %zmm24
17931 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm10, %zmm24
17932 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm23
17933 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm7, %zmm23
17934 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm1
17935 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17936 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm27 {%k3} = zmm6[0],zmm5[0],zmm6[2],zmm5[2],zmm6[4],zmm5[4],zmm6[6],zmm5[6]
17937 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm12
17938 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm29
17939 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm25, %zmm6
17940 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm11[0],zmm0[0],zmm11[2],zmm0[2],zmm11[4],zmm0[4],zmm11[6],zmm0[6]
17941 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm11, %zmm10
17942 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm11, %zmm0, %zmm7
17943 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm25, %zmm11
17944 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, %zmm14 {%k1}
17945 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,2,3],zmm16[4,5,6,7]
17946 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r8), %zmm2
17947 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,11,u,u,4,5,6,7>
17948 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm20, %zmm5
17949 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%r9), %zmm25
17950 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,11,u,4,5,6,7>
17951 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm5, %zmm15
17952 ; AVX512BW-ONLY-FAST-NEXT: movb $4, %sil
17953 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
17954 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k3}
17955 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm16 = <0,1,2,10,u,5,6,7>
17956 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm14, %zmm16
17957 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, %zmm3 {%k1}
17958 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <12,u,u,3,4,5,6,13>
17959 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm3, %zmm5
17960 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = <0,12,u,3,4,5,6,7>
17961 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm5, %zmm19
17962 ; AVX512BW-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm20 = [0,8,0,8,0,8,0,8]
17963 ; AVX512BW-ONLY-FAST-NEXT: # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
17964 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17965 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
17966 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17967 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17968 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
17969 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17970 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17971 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
17972 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17973 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
17974 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm1 # 64-byte Folded Reload
17975 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
17976 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
17977 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm20, %zmm17 # 64-byte Folded Reload
17978 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
17979 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm21, %zmm20, %zmm26
17980 ; AVX512BW-ONLY-FAST-NEXT: movb $24, %sil
17981 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k4
17982 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm6 {%k4}
17983 ; AVX512BW-ONLY-FAST-NEXT: movb $6, %sil
17984 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k5
17985 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 456(%rcx), %ymm1
17986 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
17987 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k5}
17988 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,9,u,u,6,7>
17989 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm7, %zmm1
17990 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm11 {%k4}
17991 ; AVX512BW-ONLY-FAST-NEXT: movb $64, %sil
17992 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
17993 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm0 {%k3}
17994 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,1,2,3,4,15,u,u>
17995 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm11
17996 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r8), %zmm2
17997 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm6
17998 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%r9), %zmm3
17999 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm20
18000 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
18001 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm4
18002 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18003 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm14
18004 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
18005 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm3, %zmm13
18006 ; AVX512BW-ONLY-FAST-NEXT: movb $12, %sil
18007 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k3
18008 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 448(%rdx), %xmm2
18009 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],mem[0]
18010 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
18011 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm10 {%k3}
18012 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm10, %zmm2
18013 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,1,2,3,4,8,u,7>
18014 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm2, %zmm5
18015 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
18016 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm6
18017 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,3,9,u,6,7>
18018 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm1, %zmm7
18019 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <13,u,2,3,4,5,6,14>
18020 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm25, %zmm0, %zmm10
18021 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm25, %zmm2, %zmm11
18022 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm0
18023 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18024 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm9 {%k5}
18025 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm0
18026 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18027 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
18028 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k5}
18029 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm0
18030 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18031 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18032 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k5}
18033 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm0
18034 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18035 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
18036 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm18 {%k5}
18037 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 264(%rcx), %ymm0
18038 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18039 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
18040 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k5}
18041 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 328(%rcx), %ymm0
18042 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18043 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm28 {%k5}
18044 ; AVX512BW-ONLY-FAST-NEXT: vpbroadcastq 392(%rcx), %ymm0
18045 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
18046 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm23 {%k5}
18047 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 448(%rax), %zmm0
18048 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,10,5,6,7]
18049 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm16, %zmm1
18050 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18051 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,12,3,4,5,6,7]
18052 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm19, %zmm1
18053 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18054 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 384(%rax), %zmm1
18055 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18056 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm4
18057 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
18058 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18059 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm16 {%k1}
18060 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18061 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k2} = zmm3[2,3,2,3],zmm1[2,3,2,3]
18062 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18063 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18064 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm14
18065 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18066 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18067 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm3, %zmm13
18068 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18069 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,3,4,5,8,7]
18070 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm5, %zmm3
18071 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18072 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [14,1,2,3,4,5,6,15]
18073 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm5, %zmm6
18074 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,4,9,6,7]
18075 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm7, %zmm1
18076 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18077 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
18078 ; AVX512BW-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm10, %zmm1
18079 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18080 ; AVX512BW-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm5, %zmm11
18081 ; AVX512BW-ONLY-FAST-NEXT: movb $8, %sil
18082 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
18083 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm15 {%k2}
18084 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18085 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18086 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18087 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
18088 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18089 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18090 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k4}
18091 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18092 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18093 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
18094 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18095 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18096 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
18097 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18098 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18099 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k4}
18100 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18101 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k4}
18102 ; AVX512BW-ONLY-FAST-NEXT: movb $-31, %sil
18103 ; AVX512BW-ONLY-FAST-NEXT: kmovd %esi, %k2
18104 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18105 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18106 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18107 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18108 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
18109 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18110 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18111 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18112 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18113 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18114 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
18115 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18116 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18117 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
18118 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18119 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18120 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm22 {%k2}
18121 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18122 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm0
18123 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18124 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18125 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18126 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
18127 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
18128 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18129 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18130 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18131 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k3}
18132 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
18133 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18134 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18135 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18136 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
18137 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
18138 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18139 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18140 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18141 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k3}
18142 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 256(%rdx), %xmm0
18143 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18144 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18145 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18146 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm10 {%k3}
18147 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 320(%rdx), %xmm0
18148 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18149 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18150 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
18151 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k3}
18152 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa 384(%rdx), %xmm0
18153 ; AVX512BW-ONLY-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
18154 ; AVX512BW-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
18155 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm24 {%k3}
18156 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18157 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, (%rax), %zmm0, %zmm0
18158 ; AVX512BW-ONLY-FAST-NEXT: movb $112, %cl
18159 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k2
18160 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
18161 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18162 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18163 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 64(%rax), %zmm0, %zmm0
18164 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
18165 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18166 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18167 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 128(%rax), %zmm0, %zmm0
18168 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
18169 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18170 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18171 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 192(%rax), %zmm0, %zmm0
18172 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
18173 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18174 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 256(%rax), %zmm17, %zmm0
18175 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
18176 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18177 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 320(%rax), %zmm26, %zmm0
18178 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm13 {%k2}
18179 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm26
18180 ; AVX512BW-ONLY-FAST-NEXT: vinserti32x4 $3, 384(%rax), %zmm20, %zmm0
18181 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm24 {%k2}
18182 ; AVX512BW-ONLY-FAST-NEXT: movb $56, %cl
18183 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k2
18184 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18185 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
18186 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18187 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18188 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
18189 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18190 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18191 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
18192 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18193 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18194 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
18195 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18196 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18197 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm25 {%k2}
18198 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18199 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
18200 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm23 {%k2}
18201 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18202 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18203 ; AVX512BW-ONLY-FAST-NEXT: movb $14, %cl
18204 ; AVX512BW-ONLY-FAST-NEXT: kmovd %ecx, %k2
18205 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
18206 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k2}
18207 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18208 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18209 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
18210 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm21 {%k2}
18211 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18212 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18213 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
18214 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm19 {%k2}
18215 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18216 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18217 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
18218 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm15 {%k2}
18219 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18220 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18221 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
18222 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k2}
18223 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18224 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18225 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
18226 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm3 {%k2}
18227 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
18228 ; AVX512BW-ONLY-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
18229 ; AVX512BW-ONLY-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm27 {%k2}
18230 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18231 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18232 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
18233 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18234 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18235 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
18236 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18237 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18238 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
18239 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18240 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18241 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
18242 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18243 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
18244 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k1}
18245 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18246 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18247 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
18248 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18249 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm12 {%k1}
18250 ; AVX512BW-ONLY-FAST-NEXT: movb $120, %al
18251 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
18252 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
18253 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm18 {%k1}
18254 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18255 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm16 # 64-byte Folded Reload
18256 ; AVX512BW-ONLY-FAST-NEXT: # zmm16 = zmm0[0,1,2,3],mem[4,5,6,7]
18257 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18258 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm14 {%k1}
18259 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
18260 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm4 {%k1}
18261 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18262 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm2 {%k1}
18263 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18264 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
18265 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18266 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm0 {%k1}
18267 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
18268 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm20 {%k1}
18269 ; AVX512BW-ONLY-FAST-NEXT: movb $-61, %al
18270 ; AVX512BW-ONLY-FAST-NEXT: kmovd %eax, %k1
18271 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18272 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm16 {%k1}
18273 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18274 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm5 # 64-byte Folded Reload
18275 ; AVX512BW-ONLY-FAST-NEXT: # zmm5 = zmm5[0,1,2,3],mem[4,5,6,7]
18276 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18277 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm5 {%k1}
18278 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18279 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
18280 ; AVX512BW-ONLY-FAST-NEXT: # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
18281 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
18282 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, %zmm7 {%k1}
18283 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
18284 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm9 # 64-byte Folded Reload
18285 ; AVX512BW-ONLY-FAST-NEXT: # zmm9 = zmm9[0,1,2,3],mem[4,5,6,7]
18286 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18287 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, %zmm9 {%k1}
18288 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18289 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 64-byte Folded Reload
18290 ; AVX512BW-ONLY-FAST-NEXT: # zmm10 = zmm10[0,1,2,3],mem[4,5,6,7]
18291 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
18292 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, %zmm10 {%k1}
18293 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18294 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm13 # 64-byte Folded Reload
18295 ; AVX512BW-ONLY-FAST-NEXT: # zmm13 = zmm12[0,1,2,3],mem[4,5,6,7]
18296 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
18297 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm31, %zmm13 {%k1}
18298 ; AVX512BW-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm29[0,1,2,3],zmm30[4,5,6,7]
18299 ; AVX512BW-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18300 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm17 {%k1}
18301 ; AVX512BW-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
18302 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm17, 3008(%rax)
18303 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 2944(%rax)
18304 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm27, 2880(%rax)
18305 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18306 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm12, 2816(%rax)
18307 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 2752(%rax)
18308 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 2688(%rax)
18309 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18310 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm12, 2624(%rax)
18311 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm13, 2560(%rax)
18312 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 2496(%rax)
18313 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 2432(%rax)
18314 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18315 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm12, 2368(%rax)
18316 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm28, 2304(%rax)
18317 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm26, 2240(%rax)
18318 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18319 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 2176(%rax)
18320 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 2112(%rax)
18321 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 2048(%rax)
18322 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 1984(%rax)
18323 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18324 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm10, 1920(%rax)
18325 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm25, 1856(%rax)
18326 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18327 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
18328 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18329 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1728(%rax)
18330 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 1664(%rax)
18331 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 1600(%rax)
18332 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm15, 1536(%rax)
18333 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
18334 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm9, 1472(%rax)
18335 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18336 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
18337 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18338 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
18339 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18340 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
18341 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
18342 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 1152(%rax)
18343 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 1088(%rax)
18344 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18345 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm7, 1024(%rax)
18346 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18347 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 960(%rax)
18348 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18349 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 896(%rax)
18350 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18351 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 832(%rax)
18352 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
18353 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 704(%rax)
18354 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm21, 640(%rax)
18355 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
18356 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm5, 576(%rax)
18357 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18358 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 512(%rax)
18359 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18360 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 448(%rax)
18361 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18362 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 384(%rax)
18363 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 320(%rax)
18364 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 256(%rax)
18365 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm22, 192(%rax)
18366 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18367 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 128(%rax)
18368 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18369 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 64(%rax)
18370 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18371 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, (%rax)
18372 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm11, 3520(%rax)
18373 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18374 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
18375 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18376 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
18377 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18378 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
18379 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18380 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3264(%rax)
18381 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18382 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3200(%rax)
18383 ; AVX512BW-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 3072(%rax)
18384 ; AVX512BW-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18385 ; AVX512BW-ONLY-FAST-NEXT: vmovaps %zmm0, 3136(%rax)
18386 ; AVX512BW-ONLY-FAST-NEXT: addq $6696, %rsp # imm = 0x1A28
18387 ; AVX512BW-ONLY-FAST-NEXT: vzeroupper
18388 ; AVX512BW-ONLY-FAST-NEXT: retq
18390 ; AVX512DQBW-SLOW-LABEL: store_i64_stride7_vf64:
18391 ; AVX512DQBW-SLOW: # %bb.0:
18392 ; AVX512DQBW-SLOW-NEXT: subq $6472, %rsp # imm = 0x1948
18393 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
18394 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm3
18395 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18396 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdi), %zmm21
18397 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18398 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %zmm4
18399 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18400 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rsi), %zmm18
18401 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm18, (%rsp) # 64-byte Spill
18402 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rdx), %zmm13
18403 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18404 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm5
18405 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18406 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm12
18407 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rcx), %zmm20
18408 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18409 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm17 = [11,3,11,3,11,3,11,3]
18410 ; AVX512DQBW-SLOW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18411 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm27 = [2,10,0,3,2,10,0,3]
18412 ; AVX512DQBW-SLOW-NEXT: # zmm27 = mem[0,1,2,3,0,1,2,3]
18413 ; AVX512DQBW-SLOW-NEXT: movb $96, %r10b
18414 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k1
18415 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm1
18416 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm9
18417 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rax), %zmm6
18418 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%rax), %zmm7
18419 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18420 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
18421 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18422 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18423 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm0, %zmm2
18424 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11
18425 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
18426 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
18427 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm2
18428 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm10
18429 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18430 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm0
18431 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm17, %zmm0
18432 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
18433 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm27, %zmm2
18434 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
18435 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %ymm5
18436 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18437 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%r9), %ymm3
18438 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18439 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %ymm0
18440 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18441 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%r8), %ymm4
18442 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18443 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
18444 ; AVX512DQBW-SLOW-NEXT: movb $28, %r10b
18445 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k2
18446 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm6[2,3,2,3]
18447 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18448 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
18449 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18450 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
18451 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm5
18452 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm9, %zmm2, %zmm0
18453 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16
18454 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
18455 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18456 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm2, %zmm0
18457 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm9
18458 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18459 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [5,0,14,6,5,0,14,6]
18460 ; AVX512DQBW-SLOW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
18461 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm2
18462 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm14
18463 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18464 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
18465 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [0,13,6,7,0,13,6,7]
18466 ; AVX512DQBW-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
18467 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm2
18468 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18469 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm5 = [15,7,15,7,15,7,15,7]
18470 ; AVX512DQBW-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18471 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
18472 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [6,13,14,7,6,13,14,7]
18473 ; AVX512DQBW-SLOW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
18474 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm6
18475 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18476 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm1
18477 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm17, %zmm1
18478 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm2
18479 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm27, %zmm2
18480 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
18481 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
18482 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm7[2,3,2,3]
18483 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18484 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r8), %zmm1
18485 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 64(%r9), %zmm3
18486 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18487 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm2
18488 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
18489 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18490 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18491 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
18492 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm9, %zmm2
18493 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18494 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
18495 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18496 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
18497 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm2
18498 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18499 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18500 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
18501 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm7
18502 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18503 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm22
18504 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rsi), %zmm23
18505 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm1
18506 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm17, %zmm1
18507 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm29
18508 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rdx), %zmm18
18509 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rcx), %zmm14
18510 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm2
18511 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm27, %zmm2
18512 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
18513 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%rax), %zmm7
18514 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%r9), %ymm6
18515 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18516 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%r8), %ymm1
18517 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18518 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm6[0],ymm1[2],ymm6[2]
18519 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm7[2,3,2,3]
18520 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18521 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r8), %zmm1
18522 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 128(%r9), %zmm6
18523 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18524 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm11, %zmm2
18525 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm31
18526 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm10, %zmm2
18527 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18528 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm30
18529 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18530 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm16, %zmm2
18531 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm9, %zmm2
18532 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18533 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm2
18534 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18535 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
18536 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm15, %zmm2
18537 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18538 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18539 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm5, %zmm1
18540 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm7
18541 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18542 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdi), %zmm28
18543 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rsi), %zmm21
18544 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm1
18545 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm17, %zmm1
18546 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rdx), %zmm13
18547 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rcx), %zmm26
18548 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm2
18549 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm27, %zmm2
18550 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
18551 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%rax), %zmm6
18552 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%r9), %ymm4
18553 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18554 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%r8), %ymm1
18555 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18556 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm4[0],ymm1[2],ymm4[2]
18557 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm6[2,3,2,3]
18558 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18559 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r8), %zmm1
18560 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 192(%r9), %zmm3
18561 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18562 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm11, %zmm2
18563 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm10, %zmm2
18564 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18565 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18566 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
18567 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm9, %zmm2
18568 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18569 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0
18570 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
18571 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18572 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm19, %zmm2
18573 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm15, %zmm2
18574 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18575 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18576 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm5, %zmm1
18577 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm8, %zmm6
18578 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18579 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm25
18580 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rsi), %zmm17
18581 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm2
18582 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, %zmm9
18583 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18584 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm29, %zmm2
18585 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rdx), %zmm11
18586 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rcx), %zmm29
18587 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm4
18588 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm27, %zmm4
18589 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
18590 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%rax), %zmm1
18591 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%r9), %ymm7
18592 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18593 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%r8), %ymm2
18594 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18595 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm2[0],ymm7[0],ymm2[2],ymm7[2]
18596 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 {%k2} = zmm2[2,3,2,3],zmm1[2,3,2,3]
18597 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18598 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r8), %zmm2
18599 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 256(%r9), %zmm6
18600 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
18601 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm31, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18602 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm31, %zmm4
18603 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18604 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm10, %zmm4
18605 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18606 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4
18607 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18608 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm16, %zmm4
18609 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm4
18610 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm3
18611 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18612 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18613 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
18614 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18615 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm19, %zmm4
18616 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18617 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm15, %zmm4
18618 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18619 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18620 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18621 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm5, %zmm2
18622 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm8, %zmm1
18623 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18624 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdi), %zmm7
18625 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rsi), %zmm10
18626 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm4
18627 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm9, %zmm4
18628 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rdx), %zmm6
18629 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rcx), %zmm24
18630 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm9
18631 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm27, %zmm9
18632 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
18633 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%rax), %zmm2
18634 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%r9), %ymm1
18635 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18636 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%r8), %ymm0
18637 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18638 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
18639 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm9 {%k2} = zmm4[2,3,2,3],zmm2[2,3,2,3]
18640 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18641 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r8), %zmm4
18642 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 320(%r9), %zmm1
18643 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
18644 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm31, %zmm0
18645 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm30, %zmm0
18646 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18647 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0
18648 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm16, %zmm0
18649 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm3, %zmm0
18650 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18651 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm0
18652 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18653 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm19, %zmm0
18654 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm15, %zmm0
18655 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18656 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18657 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm4
18658 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm2
18659 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18660 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm9 = [13,5,13,5,13,5,13,5]
18661 ; AVX512DQBW-SLOW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18662 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
18663 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0
18664 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm0
18665 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18666 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [6,14,6,14,6,14,6,14]
18667 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18668 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm1
18669 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm0, %zmm1
18670 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18671 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm4
18672 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm0
18673 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm0
18674 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18675 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [3,0,12,4,3,0,12,4]
18676 ; AVX512DQBW-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
18677 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm0, %zmm12
18678 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm8
18679 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
18680 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18681 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18682 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm9, %zmm2
18683 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18684 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18685 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm4, %zmm2
18686 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18687 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2
18688 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm12, %zmm5, %zmm2
18689 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18690 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm0, %zmm12
18691 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, %zmm15
18692 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
18693 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm9, %zmm1
18694 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18695 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
18696 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm4, %zmm1
18697 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18698 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, %zmm1
18699 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm5, %zmm1
18700 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18701 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm18, %zmm0, %zmm14
18702 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm18
18703 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
18704 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm9, %zmm1
18705 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18706 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
18707 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm4, %zmm1
18708 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18709 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, %zmm1
18710 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm26, %zmm5, %zmm1
18711 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18712 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm13, %zmm0, %zmm26
18713 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
18714 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm9, %zmm1
18715 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18716 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
18717 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm4, %zmm1
18718 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18719 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, %zmm1
18720 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm29, %zmm5, %zmm1
18721 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18722 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm11, %zmm0, %zmm29
18723 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
18724 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm9, %zmm1
18725 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18726 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
18727 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm4, %zmm1
18728 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18729 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm1
18730 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm24, %zmm5, %zmm1
18731 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18732 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm6, %zmm0, %zmm24
18733 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, %zmm6
18734 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdx), %zmm14
18735 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rcx), %zmm1
18736 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm12
18737 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm27, %zmm12
18738 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm19
18739 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm4, %zmm19
18740 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdx), %zmm3
18741 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rcx), %zmm20
18742 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm27
18743 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18744 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm2
18745 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm9, %zmm2
18746 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18747 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm20, %zmm3, %zmm4
18748 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18749 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm27
18750 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm20, %zmm5, %zmm27
18751 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm3, %zmm0, %zmm20
18752 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3
18753 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm14, %zmm0, %zmm3
18754 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm16
18755 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm5, %zmm14
18756 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm9, %zmm16
18757 ; AVX512DQBW-SLOW-NEXT: movb $48, %r10b
18758 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k3
18759 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [1,0,10,2,1,0,10,2]
18760 ; AVX512DQBW-SLOW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
18761 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
18762 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm2
18763 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18764 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm2
18765 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18766 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm8 {%k3} = zmm0[0],zmm4[0],zmm0[2],zmm4[2],zmm0[4],zmm4[4],zmm0[6],zmm4[6]
18767 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18768 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
18769 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm8
18770 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm11
18771 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,0,1,0,8,0,1]
18772 ; AVX512DQBW-SLOW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
18773 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm0
18774 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18775 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm5
18776 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18777 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [6,14,6,14]
18778 ; AVX512DQBW-SLOW-NEXT: # ymm13 = mem[0,1,0,1]
18779 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm8
18780 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18781 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [15,7,15,7]
18782 ; AVX512DQBW-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
18783 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm11
18784 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
18785 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm5
18786 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18787 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm1, %zmm5
18788 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18789 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm15 {%k3} = zmm0[0],zmm4[0],zmm0[2],zmm4[2],zmm0[4],zmm4[4],zmm0[6],zmm4[6]
18790 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18791 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm15
18792 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm5
18793 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm24
18794 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm2, %zmm0
18795 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18796 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm9, %zmm15
18797 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18798 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm13, %zmm5
18799 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18800 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm8, %zmm24
18801 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18802 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, %zmm0
18803 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm22, %zmm1, %zmm0
18804 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18805 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm18 {%k3} = zmm22[0],zmm23[0],zmm22[2],zmm23[2],zmm22[4],zmm23[4],zmm22[6],zmm23[6]
18806 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18807 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm4
18808 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm0
18809 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm5
18810 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm2, %zmm22
18811 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18812 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm9, %zmm4
18813 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18814 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm13, %zmm0
18815 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18816 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm23, %zmm8, %zmm5
18817 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18818 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm0
18819 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm28, %zmm1, %zmm0
18820 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
18821 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm26 {%k3} = zmm28[0],zmm21[0],zmm28[2],zmm21[2],zmm28[4],zmm21[4],zmm28[6],zmm21[6]
18822 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18823 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm4
18824 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm0
18825 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm23
18826 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm2, %zmm28
18827 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18828 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm9, %zmm4
18829 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18830 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm13, %zmm0
18831 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18832 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm8, %zmm23
18833 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm0
18834 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm25, %zmm1, %zmm0
18835 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18836 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm25[0],zmm17[0],zmm25[2],zmm17[2],zmm25[4],zmm17[4],zmm25[6],zmm17[6]
18837 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm29, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18838 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm31
18839 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm0
18840 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm29
18841 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, %zmm30
18842 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm30
18843 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm9, %zmm31
18844 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm13, %zmm0
18845 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18846 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm29
18847 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, %zmm26
18848 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm7, %zmm1, %zmm26
18849 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm6 {%k3} = zmm7[0],zmm10[0],zmm7[2],zmm10[2],zmm7[4],zmm10[4],zmm7[6],zmm10[6]
18850 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18851 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm28
18852 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm0
18853 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm24
18854 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm25
18855 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm2, %zmm25
18856 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm9, %zmm28
18857 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm13, %zmm0
18858 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18859 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm10, %zmm8, %zmm24
18860 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rdi), %zmm6
18861 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rsi), %zmm0
18862 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm7
18863 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18864 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm10, %zmm7
18865 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm4
18866 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm13, %zmm4
18867 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, %zmm5
18868 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm9, %zmm5
18869 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm21
18870 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rsi), %zmm17
18871 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm22
18872 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm21, %zmm1, %zmm22
18873 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm10
18874 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18875 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm9
18876 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm17, %zmm21, %zmm13
18877 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm20 {%k3} = zmm21[0],zmm17[0],zmm21[2],zmm17[2],zmm21[4],zmm17[4],zmm21[6],zmm17[6]
18878 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, %zmm18
18879 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm2, %zmm21
18880 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm17, %zmm8, %zmm18
18881 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} zmm3 {%k3} = zmm6[0],zmm0[0],zmm6[2],zmm0[2],zmm6[4],zmm0[4],zmm6[6],zmm0[6]
18882 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm0, %zmm6, %zmm2
18883 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm6, %zmm0, %zmm1
18884 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm0, %zmm8, %zmm6
18885 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm12 {%k1}
18886 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm4[0,1,2,3],zmm19[4,5,6,7]
18887 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r8), %zmm4
18888 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,11,u,u,4,5,6,7>
18889 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm3, %zmm7
18890 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%r9), %zmm8
18891 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,1,11,u,4,5,6,7>
18892 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm7, %zmm3
18893 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm15
18894 ; AVX512DQBW-SLOW-NEXT: movb $4, %sil
18895 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
18896 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm12 {%k3}
18897 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,10,u,5,6,7>
18898 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm12, %zmm7
18899 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm5 {%k1}
18900 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <12,u,u,3,4,5,6,13>
18901 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm5, %zmm3
18902 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,12,u,3,4,5,6,7>
18903 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm3, %zmm5
18904 ; AVX512DQBW-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [0,8,0,8,0,8,0,8]
18905 ; AVX512DQBW-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
18906 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
18907 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm19 # 64-byte Folded Reload
18908 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18909 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
18910 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18911 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
18912 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm10 # 64-byte Folded Reload
18913 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18914 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
18915 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18916 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18917 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
18918 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18919 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
18920 ; AVX512DQBW-SLOW-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm12 # 64-byte Folded Reload
18921 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18922 ; AVX512DQBW-SLOW-NEXT: movb $24, %sil
18923 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k5
18924 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, %zmm18 {%k5}
18925 ; AVX512DQBW-SLOW-NEXT: movb $6, %sil
18926 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k3
18927 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 456(%rcx), %ymm12
18928 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = mem[0,1,2,3],ymm12[4,5,6,7]
18929 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm12, %zmm0, %zmm1 {%k3}
18930 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,1,2,9,u,u,6,7>
18931 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm1, %zmm12
18932 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm6 {%k5}
18933 ; AVX512DQBW-SLOW-NEXT: movb $64, %sil
18934 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
18935 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k4}
18936 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,1,2,3,4,15,u,u>
18937 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm6
18938 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r8), %zmm4
18939 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm4, %zmm1, %zmm18
18940 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%r9), %zmm1
18941 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm3
18942 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18943 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm14
18944 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
18945 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm1, %zmm4, %zmm16
18946 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
18947 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm4, %zmm1, %zmm17
18948 ; AVX512DQBW-SLOW-NEXT: movb $12, %sil
18949 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k4
18950 ; AVX512DQBW-SLOW-NEXT: vmovdqa 448(%rdx), %xmm4
18951 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],mem[0]
18952 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
18953 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm2 {%k4}
18954 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, 448(%r8), %zmm2, %zmm2
18955 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,1,2,3,4,8,u,7>
18956 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm2, %zmm4
18957 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,15,u>
18958 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm1, %zmm2, %zmm18
18959 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,3,9,u,6,7>
18960 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm12, %zmm1
18961 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <13,u,2,3,4,5,6,14>
18962 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm8, %zmm0, %zmm12
18963 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm8, %zmm2, %zmm6
18964 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 448(%rax), %zmm2
18965 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,10,5,6,7]
18966 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm7, %zmm0
18967 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18968 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,12,3,4,5,6,7]
18969 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm5, %zmm0
18970 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18971 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 384(%rax), %zmm5
18972 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18973 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm0, %zmm14
18974 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm8
18975 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
18976 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
18977 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k1}
18978 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%r9), %ymm7
18979 ; AVX512DQBW-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
18980 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%r8), %ymm0
18981 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm7 = ymm0[0],ymm7[0],ymm0[2],ymm7[2]
18982 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm14 {%k2} = zmm7[2,3,2,3],zmm5[2,3,2,3]
18983 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18984 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18985 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm7, %zmm16
18986 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, %zmm14
18987 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
18988 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm7, %zmm17
18989 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [0,1,2,3,4,5,8,7]
18990 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm4, %zmm7
18991 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18992 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [14,1,2,3,4,5,6,15]
18993 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm5, %zmm4, %zmm18
18994 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,2,3,4,9,6,7]
18995 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm1, %zmm5
18996 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
18997 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,13,2,3,4,5,6,7]
18998 ; AVX512DQBW-SLOW-NEXT: vpermi2q %zmm2, %zmm12, %zmm1
18999 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19000 ; AVX512DQBW-SLOW-NEXT: vpermt2q %zmm2, %zmm4, %zmm6
19001 ; AVX512DQBW-SLOW-NEXT: movb $8, %sil
19002 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
19003 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k2}
19004 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19005 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19006 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k5}
19007 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19008 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19009 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k5}
19010 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19011 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19012 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k5}
19013 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19014 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k5}
19015 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19016 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k5}
19017 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19018 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k5}
19019 ; AVX512DQBW-SLOW-NEXT: movb $-31, %sil
19020 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
19021 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19022 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm11 {%k2}
19023 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19024 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19025 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
19026 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19027 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19028 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
19029 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19030 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19031 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm23 {%k2}
19032 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19033 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19034 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm29 {%k2}
19035 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19036 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm24 {%k2}
19037 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm1
19038 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19039 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19040 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
19041 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k4}
19042 ; AVX512DQBW-SLOW-NEXT: vmovdqa 64(%rdx), %xmm1
19043 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19044 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19045 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19046 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k4}
19047 ; AVX512DQBW-SLOW-NEXT: vmovdqa 128(%rdx), %xmm1
19048 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19049 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19050 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19051 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k4}
19052 ; AVX512DQBW-SLOW-NEXT: vmovdqa 192(%rdx), %xmm1
19053 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19054 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19055 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19056 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm5 {%k4}
19057 ; AVX512DQBW-SLOW-NEXT: vmovdqa 256(%rdx), %xmm1
19058 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19059 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19060 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm30 {%k4}
19061 ; AVX512DQBW-SLOW-NEXT: vmovdqa 320(%rdx), %xmm1
19062 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19063 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19064 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm25 {%k4}
19065 ; AVX512DQBW-SLOW-NEXT: vmovdqa 384(%rdx), %xmm1
19066 ; AVX512DQBW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
19067 ; AVX512DQBW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
19068 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm21 {%k4}
19069 ; AVX512DQBW-SLOW-NEXT: movb $112, %sil
19070 ; AVX512DQBW-SLOW-NEXT: kmovd %esi, %k2
19071 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, (%rax), %zmm19, %zmm7 {%k2}
19072 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19073 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19074 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 64(%rax), %zmm1, %zmm2 {%k2}
19075 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19076 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 128(%rax), %zmm10, %zmm4 {%k2}
19077 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19078 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19079 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 192(%rax), %zmm1, %zmm5 {%k2}
19080 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19081 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19082 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 256(%rax), %zmm1, %zmm30 {%k2}
19083 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19084 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 320(%rax), %zmm1, %zmm25 {%k2}
19085 ; AVX512DQBW-SLOW-NEXT: vinserti64x2 $3, 384(%rax), %zmm3, %zmm21 {%k2}
19086 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 8(%rcx), %ymm1
19087 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19088 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19089 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm5 {%k3}
19090 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 72(%rcx), %ymm1
19091 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19092 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19093 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm2 {%k3}
19094 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 136(%rcx), %ymm1
19095 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19096 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19097 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k3}
19098 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 200(%rcx), %ymm1
19099 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19100 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
19101 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm4 {%k3}
19102 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 264(%rcx), %ymm1
19103 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19104 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
19105 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm7 {%k3}
19106 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 328(%rcx), %ymm1
19107 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19108 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm26 {%k3}
19109 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq 392(%rcx), %ymm1
19110 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19111 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm22 {%k3}
19112 ; AVX512DQBW-SLOW-NEXT: movb $56, %cl
19113 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
19114 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19115 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k2}
19116 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19117 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19118 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k2}
19119 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19120 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19121 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k2}
19122 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19123 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19124 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k2}
19125 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
19126 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19127 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm7 {%k2}
19128 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19129 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19130 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm26 {%k2}
19131 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm22 {%k2}
19132 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19133 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19134 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
19135 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19136 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19137 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
19138 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19139 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19140 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
19141 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19142 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19143 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm5 {%k1}
19144 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19145 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm31 {%k1}
19146 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19147 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm28 {%k1}
19148 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19149 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm9 {%k1}
19150 ; AVX512DQBW-SLOW-NEXT: movb $120, %cl
19151 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
19152 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19153 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm16 {%k1}
19154 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19155 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm15 # 64-byte Folded Reload
19156 ; AVX512DQBW-SLOW-NEXT: # zmm15 = zmm1[0,1,2,3],mem[4,5,6,7]
19157 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
19158 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm19 {%k1}
19159 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
19160 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm27 {%k1}
19161 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
19162 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm12 {%k1}
19163 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
19164 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, %zmm10 {%k1}
19165 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
19166 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm31 {%k1}
19167 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, %zmm14 {%k1}
19168 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, %zmm23
19169 ; AVX512DQBW-SLOW-NEXT: movb $-61, %cl
19170 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
19171 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19172 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
19173 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19174 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm14 # 64-byte Folded Reload
19175 ; AVX512DQBW-SLOW-NEXT: # zmm14 = zmm1[0,1,2,3],mem[4,5,6,7]
19176 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19177 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, %zmm14 {%k1}
19178 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19179 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm11 # 64-byte Folded Reload
19180 ; AVX512DQBW-SLOW-NEXT: # zmm11 = zmm1[0,1,2,3],mem[4,5,6,7]
19181 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19182 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, %zmm11 {%k1}
19183 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19184 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm4 # 64-byte Folded Reload
19185 ; AVX512DQBW-SLOW-NEXT: # zmm4 = zmm1[0,1,2,3],mem[4,5,6,7]
19186 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
19187 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm4 {%k1}
19188 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19189 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm5 # 64-byte Folded Reload
19190 ; AVX512DQBW-SLOW-NEXT: # zmm5 = zmm1[0,1,2,3],mem[4,5,6,7]
19191 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
19192 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, %zmm5 {%k1}
19193 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19194 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm7 # 64-byte Folded Reload
19195 ; AVX512DQBW-SLOW-NEXT: # zmm7 = zmm1[0,1,2,3],mem[4,5,6,7]
19196 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19197 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, %zmm7 {%k1}
19198 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm8 # 64-byte Folded Reload
19199 ; AVX512DQBW-SLOW-NEXT: # zmm8 = zmm13[0,1,2,3],mem[4,5,6,7]
19200 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm8 {%k1}
19201 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19202 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19203 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19204 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19205 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19206 ; AVX512DQBW-SLOW-NEXT: movb $14, %cl
19207 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k1
19208 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
19209 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm13 {%k1}
19210 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19211 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19212 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19213 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19214 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19215 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
19216 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm17 {%k1}
19217 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19218 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19219 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19220 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19221 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19222 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
19223 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm28 {%k1}
19224 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19225 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19226 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19227 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19228 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19229 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
19230 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm3 {%k1}
19231 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19232 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19233 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19234 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19235 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19236 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19237 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm2 {%k1}
19238 ; AVX512DQBW-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
19239 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
19240 ; AVX512DQBW-SLOW-NEXT: # ymm9 = ymm9[1],mem[1],ymm9[3],mem[3]
19241 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,3,3]
19242 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],mem[6,7]
19243 ; AVX512DQBW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
19244 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm9, %zmm0, %zmm1 {%k1}
19245 ; AVX512DQBW-SLOW-NEXT: vpunpckhqdq {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
19246 ; AVX512DQBW-SLOW-NEXT: # ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
19247 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,3,3]
19248 ; AVX512DQBW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
19249 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k1}
19250 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
19251 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm8, 3008(%rax)
19252 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm23, 2944(%rax)
19253 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 2880(%rax)
19254 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19255 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2816(%rax)
19256 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm22, 2752(%rax)
19257 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm21, 2688(%rax)
19258 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm24, 2624(%rax)
19259 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 2560(%rax)
19260 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm31, 2496(%rax)
19261 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, 2432(%rax)
19262 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19263 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 2368(%rax)
19264 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm26, 2304(%rax)
19265 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm25, 2240(%rax)
19266 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm29, 2176(%rax)
19267 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, 2112(%rax)
19268 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm10, 2048(%rax)
19269 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm2, 1984(%rax)
19270 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19271 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1920(%rax)
19272 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19273 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1856(%rax)
19274 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm30, 1792(%rax)
19275 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19276 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1728(%rax)
19277 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm4, 1664(%rax)
19278 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm12, 1600(%rax)
19279 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 1536(%rax)
19280 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19281 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1472(%rax)
19282 ; AVX512DQBW-SLOW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
19283 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1408(%rax)
19284 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19285 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1344(%rax)
19286 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19287 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1280(%rax)
19288 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 1216(%rax)
19289 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm27, 1152(%rax)
19290 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm28, 1088(%rax)
19291 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19292 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 1024(%rax)
19293 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19294 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 960(%rax)
19295 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19296 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 896(%rax)
19297 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19298 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 832(%rax)
19299 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, 768(%rax)
19300 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm19, 704(%rax)
19301 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm17, 640(%rax)
19302 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19303 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 576(%rax)
19304 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19305 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 512(%rax)
19306 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19307 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 448(%rax)
19308 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19309 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 384(%rax)
19310 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm15, 320(%rax)
19311 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 256(%rax)
19312 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm13, 192(%rax)
19313 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19314 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 128(%rax)
19315 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19316 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 64(%rax)
19317 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19318 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, (%rax)
19319 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm6, 3520(%rax)
19320 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19321 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3456(%rax)
19322 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19323 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3392(%rax)
19324 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19325 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3328(%rax)
19326 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19327 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3264(%rax)
19328 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19329 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3200(%rax)
19330 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm18, 3072(%rax)
19331 ; AVX512DQBW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19332 ; AVX512DQBW-SLOW-NEXT: vmovaps %zmm0, 3136(%rax)
19333 ; AVX512DQBW-SLOW-NEXT: addq $6472, %rsp # imm = 0x1948
19334 ; AVX512DQBW-SLOW-NEXT: vzeroupper
19335 ; AVX512DQBW-SLOW-NEXT: retq
19337 ; AVX512DQBW-FAST-LABEL: store_i64_stride7_vf64:
19338 ; AVX512DQBW-FAST: # %bb.0:
19339 ; AVX512DQBW-FAST-NEXT: subq $6568, %rsp # imm = 0x19A8
19340 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
19341 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdi), %zmm5
19342 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19343 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdi), %zmm11
19344 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19345 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rsi), %zmm26
19346 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rsi), %zmm22
19347 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19348 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rdx), %zmm7
19349 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19350 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rdx), %zmm6
19351 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19352 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rcx), %zmm9
19353 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19354 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rcx), %zmm19
19355 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19356 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [11,3,11,3,11,3,11,3]
19357 ; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19358 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [2,10,0,3,2,10,0,3]
19359 ; AVX512DQBW-FAST-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
19360 ; AVX512DQBW-FAST-NEXT: movb $96, %r10b
19361 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k1
19362 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r8), %zmm1
19363 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%r9), %zmm8
19364 ; AVX512DQBW-FAST-NEXT: vmovdqa64 (%rax), %zmm3
19365 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%rax), %zmm4
19366 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19367 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm0 = [9,1,9,1,9,1,9,1]
19368 ; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19369 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
19370 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm0, %zmm2
19371 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm16
19372 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [4,9,0,3,4,9,0,3]
19373 ; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19374 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
19375 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm13
19376 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19377 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm0
19378 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm10, %zmm0
19379 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm2
19380 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm9, %zmm12, %zmm2
19381 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
19382 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r9), %ymm0
19383 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19384 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%r9), %ymm9
19385 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19386 ; AVX512DQBW-FAST-NEXT: vmovdqa (%r8), %ymm6
19387 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19388 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %ymm30
19389 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm6[0],ymm0[0],ymm6[2],ymm0[2]
19390 ; AVX512DQBW-FAST-NEXT: movb $28, %r10b
19391 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k2
19392 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm0[2,3,2,3],zmm3[2,3,2,3]
19393 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19394 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [4,12,0,5,4,12,0,5]
19395 ; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19396 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
19397 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm6
19398 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm2, %zmm0
19399 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm20
19400 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,1,12,7,0,1,12,7]
19401 ; AVX512DQBW-FAST-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
19402 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
19403 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm17
19404 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19405 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [5,0,14,6,5,0,14,6]
19406 ; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19407 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm2
19408 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19409 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm0, %zmm2
19410 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm21
19411 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [0,13,6,7,0,13,6,7]
19412 ; AVX512DQBW-FAST-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
19413 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm2
19414 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm18
19415 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19416 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm8 = [15,7,15,7,15,7,15,7]
19417 ; AVX512DQBW-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19418 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm8, %zmm1
19419 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [6,13,14,7,6,13,14,7]
19420 ; AVX512DQBW-FAST-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
19421 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm3
19422 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19423 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm1
19424 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm1
19425 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm2
19426 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm7
19427 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm12, %zmm2
19428 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2 {%k1}
19429 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm1 = ymm30[0],ymm9[0],ymm30[2],ymm9[2]
19430 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 {%k2} = zmm1[2,3,2,3],zmm4[2,3,2,3]
19431 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19432 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r8), %zmm1
19433 ; AVX512DQBW-FAST-NEXT: vmovdqa64 64(%r9), %zmm3
19434 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
19435 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm16, %zmm2
19436 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19437 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm2
19438 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19439 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
19440 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm20, %zmm2
19441 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm2
19442 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19443 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
19444 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm21, %zmm2
19445 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm2
19446 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19447 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19448 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm8, %zmm1
19449 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm14, %zmm4
19450 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19451 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdi), %zmm1
19452 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19453 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rsi), %zmm19
19454 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm10, %zmm1
19455 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rdx), %zmm3
19456 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19457 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rcx), %zmm27
19458 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm12, %zmm3
19459 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm3 {%k1}
19460 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%rax), %zmm5
19461 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%r9), %ymm0
19462 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19463 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %ymm24
19464 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm2 = ymm24[0],ymm0[0],ymm24[2],ymm0[2]
19465 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 {%k2} = zmm2[2,3,2,3],zmm5[2,3,2,3]
19466 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19467 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r8), %zmm2
19468 ; AVX512DQBW-FAST-NEXT: vmovdqa64 128(%r9), %zmm0
19469 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
19470 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm4
19471 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm16, %zmm3
19472 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19473 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm13, %zmm3
19474 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19475 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
19476 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm20, %zmm3
19477 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm17, %zmm3
19478 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19479 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
19480 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm3
19481 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm3
19482 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19483 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19484 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm2
19485 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm14, %zmm5
19486 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19487 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdi), %zmm2
19488 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19489 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rsi), %zmm16
19490 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
19491 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm10, %zmm2
19492 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rdx), %zmm5
19493 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19494 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rcx), %zmm15
19495 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm12, %zmm5
19496 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
19497 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%rax), %zmm23
19498 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %ymm25
19499 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%r8), %ymm11
19500 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm11[0],ymm25[0],ymm11[2],ymm25[2]
19501 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 {%k2} = zmm3[2,3,2,3],zmm23[2,3,2,3]
19502 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19503 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r8), %zmm3
19504 ; AVX512DQBW-FAST-NEXT: vmovdqa64 192(%r9), %zmm6
19505 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm5
19506 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm4, %zmm5
19507 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
19508 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm13, %zmm5
19509 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19510 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm0
19511 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm4
19512 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm5
19513 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm20, %zmm4
19514 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm6
19515 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm10
19516 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm17, %zmm4
19517 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19518 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm4
19519 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19520 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm21, %zmm4
19521 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm18, %zmm4
19522 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19523 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19524 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm3
19525 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm3, %zmm14, %zmm23
19526 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19527 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdi), %zmm3
19528 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19529 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rsi), %zmm31
19530 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm1, %zmm3
19531 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm22
19532 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19533 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rdx), %zmm28
19534 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rcx), %zmm12
19535 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm13
19536 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm7, %zmm13
19537 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19538 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm13 {%k1}
19539 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%rax), %zmm23
19540 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%r9), %ymm9
19541 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%r8), %ymm5
19542 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm4 = ymm5[0],ymm9[0],ymm5[2],ymm9[2]
19543 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm13 {%k2} = zmm4[2,3,2,3],zmm23[2,3,2,3]
19544 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19545 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r8), %zmm4
19546 ; AVX512DQBW-FAST-NEXT: vmovdqa64 256(%r9), %zmm1
19547 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm13
19548 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm13
19549 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
19550 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19551 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm0, %zmm13
19552 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19553 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm17
19554 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19555 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
19556 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19557 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm2
19558 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm20, %zmm0
19559 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm10, %zmm0
19560 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19561 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm1
19562 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19563 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm10
19564 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19565 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm10
19566 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm29
19567 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19568 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm18, %zmm10
19569 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19570 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19571 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19572 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm8, %zmm4
19573 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm14, %zmm23
19574 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19575 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdi), %zmm23
19576 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rsi), %zmm20
19577 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm4
19578 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm22, %zmm4
19579 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rdx), %zmm0
19580 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19581 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rcx), %zmm13
19582 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm7, %zmm0
19583 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
19584 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%rax), %zmm21
19585 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%r9), %ymm4
19586 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%r8), %ymm2
19587 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm10 = ymm2[0],ymm4[0],ymm2[2],ymm4[2]
19588 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 {%k2} = zmm10[2,3,2,3],zmm21[2,3,2,3]
19589 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19590 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r8), %zmm10
19591 ; AVX512DQBW-FAST-NEXT: vmovdqa64 320(%r9), %zmm22
19592 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
19593 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm3, %zmm0
19594 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm17, %zmm0
19595 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19596 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, %zmm0
19597 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm6, %zmm0
19598 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm1, %zmm0
19599 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19600 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, %zmm0
19601 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm29, %zmm0
19602 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm21, %zmm18, %zmm0
19603 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19604 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19605 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm8, %zmm10
19606 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm10, %zmm14, %zmm21
19607 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19608 ; AVX512DQBW-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,7,7]
19609 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
19610 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
19611 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19612 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm30 # 32-byte Folded Reload
19613 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %ymm30, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19614 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm24 # 32-byte Folded Reload
19615 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %ymm24, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19616 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm25, %ymm0, %ymm11
19617 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19618 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm9, %ymm0, %ymm5
19619 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19620 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm4, %ymm0, %ymm2
19621 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19622 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%r9), %ymm1
19623 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%r8), %ymm2
19624 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
19625 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19626 ; AVX512DQBW-FAST-NEXT: vpermt2q %ymm1, %ymm0, %ymm2
19627 ; AVX512DQBW-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
19628 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [3,0,12,4,3,0,12,4]
19629 ; AVX512DQBW-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
19630 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
19631 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm1
19632 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19633 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm1
19634 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm21 = [13,5,13,5,13,5,13,5]
19635 ; AVX512DQBW-FAST-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19636 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
19637 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm21, %zmm2
19638 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19639 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm3 = [6,14,6,14,6,14,6,14]
19640 ; AVX512DQBW-FAST-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19641 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
19642 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm3, %zmm2
19643 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19644 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm7, %zmm8, %zmm0
19645 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19646 ; AVX512DQBW-FAST-NEXT: movb $48, %r10b
19647 ; AVX512DQBW-FAST-NEXT: kmovd %r10d, %k3
19648 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,8,0,1,0,8,0,1]
19649 ; AVX512DQBW-FAST-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
19650 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19651 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
19652 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm11, %zmm2
19653 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19654 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [1,0,10,2,1,0,10,2]
19655 ; AVX512DQBW-FAST-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
19656 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, %zmm2
19657 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
19658 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19659 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm26[0],zmm0[2],zmm26[2],zmm0[4],zmm26[4],zmm0[6],zmm26[6]
19660 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19661 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19662 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm21, %zmm1
19663 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19664 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm30 = [6,14,6,14]
19665 ; AVX512DQBW-FAST-NEXT: # ymm30 = mem[0,1,0,1]
19666 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19667 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm30, %zmm1
19668 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19669 ; AVX512DQBW-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm18 = [15,7,15,7]
19670 ; AVX512DQBW-FAST-NEXT: # ymm18 = mem[0,1,0,1]
19671 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm26, %zmm18, %zmm0
19672 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19673 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19674 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19675 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19676 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm29, %zmm1
19677 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
19678 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm21, %zmm2
19679 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19680 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
19681 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm2
19682 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19683 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm8, %zmm4
19684 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19685 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19686 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
19687 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19688 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
19689 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19690 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
19691 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm7, %zmm0
19692 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19693 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm4[0],zmm2[0],zmm4[2],zmm2[2],zmm4[4],zmm2[4],zmm4[6],zmm2[6]
19694 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19695 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
19696 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm0
19697 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19698 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm0
19699 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm30, %zmm0
19700 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19701 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm18, %zmm4
19702 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19703 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, %zmm0
19704 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19705 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm0
19706 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19707 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm21, %zmm1
19708 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19709 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19710 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm3, %zmm1
19711 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19712 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm27, %zmm8, %zmm2
19713 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19714 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19715 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19716 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm11, %zmm1
19717 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19718 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm1
19719 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm1
19720 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19721 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm19[0],zmm2[2],zmm19[2],zmm2[4],zmm19[4],zmm2[6],zmm19[6]
19722 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19723 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
19724 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm21, %zmm0
19725 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19726 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
19727 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm30, %zmm0
19728 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19729 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm19, %zmm18, %zmm2
19730 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19731 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm0
19732 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19733 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm0
19734 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19735 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm21, %zmm1
19736 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19737 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19738 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm3, %zmm1
19739 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19740 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm8, %zmm2
19741 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19742 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
19743 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm1
19744 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm11, %zmm1
19745 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19746 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm17
19747 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm7, %zmm17
19748 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm2[0],zmm16[0],zmm2[2],zmm16[2],zmm2[4],zmm16[4],zmm2[6],zmm16[6]
19749 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19750 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
19751 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm21, %zmm0
19752 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19753 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm0
19754 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm30, %zmm0
19755 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19756 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm16, %zmm18, %zmm2
19757 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19758 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm0
19759 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm28, %zmm29, %zmm0
19760 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19761 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
19762 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm21, %zmm0
19763 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19764 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, %zmm0
19765 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm3, %zmm0
19766 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19767 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm12, %zmm8, %zmm28
19768 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm28, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19769 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19770 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
19771 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm11, %zmm2
19772 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19773 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, %zmm2
19774 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm0, %zmm7, %zmm2
19775 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm19
19776 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm1 {%k3} = zmm0[0],zmm31[0],zmm0[2],zmm31[2],zmm0[4],zmm31[4],zmm0[6],zmm31[6]
19777 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19778 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19779 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm21, %zmm1
19780 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19781 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1
19782 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm30, %zmm1
19783 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19784 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm31, %zmm18, %zmm0
19785 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19786 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm0
19787 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
19788 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm15, %zmm29, %zmm0
19789 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm1
19790 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm21, %zmm1
19791 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19792 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm1
19793 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm3, %zmm1
19794 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19795 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm13, %zmm8, %zmm15
19796 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm1
19797 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm11, %zmm1
19798 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19799 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm28
19800 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm23, %zmm7, %zmm28
19801 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm0 {%k3} = zmm23[0],zmm20[0],zmm23[2],zmm20[2],zmm23[4],zmm20[4],zmm23[6],zmm20[6]
19802 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19803 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
19804 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm21, %zmm0
19805 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19806 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm0
19807 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm30, %zmm0
19808 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19809 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm18, %zmm23
19810 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, %zmm26
19811 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdx), %zmm20
19812 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rcx), %zmm5
19813 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm13
19814 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19815 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm0, %zmm13
19816 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm10
19817 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm3, %zmm10
19818 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm9
19819 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm20, %zmm29, %zmm9
19820 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm16
19821 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm21, %zmm16
19822 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm20
19823 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdx), %zmm1
19824 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rcx), %zmm4
19825 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm1, %zmm0
19826 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19827 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm1, %zmm4, %zmm29
19828 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm1, %zmm3
19829 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19830 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm0
19831 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm8, %zmm1
19832 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm0
19833 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19834 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rdi), %zmm12
19835 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rsi), %zmm4
19836 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm8
19837 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19838 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm8
19839 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm3
19840 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm30, %zmm3
19841 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, %zmm2
19842 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm2
19843 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rdi), %zmm6
19844 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rsi), %zmm5
19845 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, %zmm25
19846 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm11, %zmm25
19847 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm24
19848 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm6, %zmm7, %zmm24
19849 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm0
19850 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19851 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm29 {%k3} = zmm6[0],zmm5[0],zmm6[2],zmm5[2],zmm6[4],zmm5[4],zmm6[6],zmm5[6]
19852 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm21
19853 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm6, %zmm30
19854 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm18, %zmm6
19855 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} zmm9 {%k3} = zmm12[0],zmm4[0],zmm12[2],zmm4[2],zmm12[4],zmm4[4],zmm12[6],zmm4[6]
19856 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm4, %zmm12, %zmm11
19857 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm12, %zmm4, %zmm7
19858 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm12
19859 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm13 {%k1}
19860 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm10[4,5,6,7]
19861 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r8), %zmm5
19862 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,11,u,u,4,5,6,7>
19863 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm9, %zmm4
19864 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%r9), %zmm8
19865 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,1,11,u,4,5,6,7>
19866 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm4, %zmm14
19867 ; AVX512DQBW-FAST-NEXT: movb $4, %sil
19868 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
19869 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm13 {%k3}
19870 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,1,2,10,u,5,6,7>
19871 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm13, %zmm4
19872 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm2 {%k1}
19873 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <12,u,u,3,4,5,6,13>
19874 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm10
19875 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = <0,12,u,3,4,5,6,7>
19876 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm10, %zmm18
19877 ; AVX512DQBW-FAST-NEXT: vbroadcasti32x4 {{.*#+}} zmm10 = [0,8,0,8,0,8,0,8]
19878 ; AVX512DQBW-FAST-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
19879 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19880 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
19881 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19882 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19883 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
19884 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19885 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19886 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
19887 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19888 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19889 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 64-byte Folded Reload
19890 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19891 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
19892 ; AVX512DQBW-FAST-NEXT: vpermt2q {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm23 # 64-byte Folded Reload
19893 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
19894 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm22, %zmm10, %zmm27
19895 ; AVX512DQBW-FAST-NEXT: movb $24, %sil
19896 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k4
19897 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm6 {%k4}
19898 ; AVX512DQBW-FAST-NEXT: movb $6, %sil
19899 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k5
19900 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 456(%rcx), %ymm0
19901 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = mem[0,1,2,3],ymm0[4,5,6,7]
19902 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k5}
19903 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,2,9,u,u,6,7>
19904 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm7, %zmm1
19905 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm12 {%k4}
19906 ; AVX512DQBW-FAST-NEXT: movb $64, %sil
19907 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
19908 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm3 {%k3}
19909 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,15,u,u>
19910 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm12
19911 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r8), %zmm5
19912 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm2, %zmm6
19913 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%r9), %zmm2
19914 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm10
19915 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19916 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm16
19917 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
19918 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm2, %zmm5, %zmm13
19919 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
19920 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm0
19921 ; AVX512DQBW-FAST-NEXT: movb $12, %sil
19922 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k3
19923 ; AVX512DQBW-FAST-NEXT: vmovdqa 448(%rdx), %xmm5
19924 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],mem[0]
19925 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
19926 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm5, %zmm0, %zmm11 {%k3}
19927 ; AVX512DQBW-FAST-NEXT: vinserti32x4 $2, 448(%r8), %zmm11, %zmm5
19928 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,3,4,8,u,7>
19929 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm5, %zmm7
19930 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,1,2,3,4,5,15,u>
19931 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm2, %zmm5, %zmm6
19932 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,1,2,3,9,u,6,7>
19933 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm1, %zmm11
19934 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <13,u,2,3,4,5,6,14>
19935 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm8, %zmm3, %zmm2
19936 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm8, %zmm5, %zmm12
19937 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 8(%rcx), %ymm1
19938 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19939 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
19940 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm9 {%k5}
19941 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 72(%rcx), %ymm1
19942 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19943 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
19944 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm31 {%k5}
19945 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 136(%rcx), %ymm1
19946 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19947 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
19948 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm8 {%k5}
19949 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 200(%rcx), %ymm1
19950 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19951 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm3
19952 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm3 {%k5}
19953 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 264(%rcx), %ymm1
19954 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19955 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, %zmm20
19956 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm20 {%k5}
19957 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 328(%rcx), %ymm1
19958 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19959 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm28 {%k5}
19960 ; AVX512DQBW-FAST-NEXT: vpbroadcastq 392(%rcx), %ymm1
19961 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = mem[0,1,2,3],ymm1[4,5,6,7]
19962 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm24 {%k5}
19963 ; AVX512DQBW-FAST-NEXT: vmovdqa64 448(%rax), %zmm5
19964 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,2,3,10,5,6,7]
19965 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm4, %zmm1
19966 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19967 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,1,12,3,4,5,6,7]
19968 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm18, %zmm1
19969 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19970 ; AVX512DQBW-FAST-NEXT: vmovdqa64 384(%rax), %zmm1
19971 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
19972 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm16
19973 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, %zmm4
19974 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19975 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
19976 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, %zmm16 {%k1}
19977 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
19978 ; AVX512DQBW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 {%k2} = zmm17[2,3,2,3],zmm1[2,3,2,3]
19979 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19980 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
19981 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm16, %zmm13
19982 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, %zmm18
19983 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
19984 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm13, %zmm0
19985 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19986 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,5,8,7]
19987 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm7, %zmm0
19988 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19989 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = [14,1,2,3,4,5,6,15]
19990 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm1, %zmm7, %zmm6
19991 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,9,6,7]
19992 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm11, %zmm0
19993 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19994 ; AVX512DQBW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,13,2,3,4,5,6,7]
19995 ; AVX512DQBW-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm0
19996 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
19997 ; AVX512DQBW-FAST-NEXT: vpermt2q %zmm5, %zmm7, %zmm12
19998 ; AVX512DQBW-FAST-NEXT: movb $8, %sil
19999 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
20000 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm14 {%k2}
20001 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20002 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20003 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20004 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k4}
20005 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20006 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20007 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k4}
20008 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
20009 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20010 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k4}
20011 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20012 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20013 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k4}
20014 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
20015 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20016 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm11 {%k4}
20017 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, %zmm26 {%k4}
20018 ; AVX512DQBW-FAST-NEXT: movb $-31, %sil
20019 ; AVX512DQBW-FAST-NEXT: kmovd %esi, %k2
20020 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20021 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
20022 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20023 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20024 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k2}
20025 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20026 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20027 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
20028 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20029 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20030 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k2}
20031 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20032 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20033 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
20034 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20035 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20036 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
20037 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20038 ; AVX512DQBW-FAST-NEXT: vmovdqa (%rdx), %xmm0
20039 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20040 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20041 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
20042 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm13 {%k3}
20043 ; AVX512DQBW-FAST-NEXT: vmovdqa 64(%rdx), %xmm0
20044 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20045 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20046 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20047 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k3}
20048 ; AVX512DQBW-FAST-NEXT: vmovdqa 128(%rdx), %xmm0
20049 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20050 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20051 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20052 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k3}
20053 ; AVX512DQBW-FAST-NEXT: vmovdqa 192(%rdx), %xmm0
20054 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20055 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20056 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20057 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm5 {%k3}
20058 ; AVX512DQBW-FAST-NEXT: vmovdqa 256(%rdx), %xmm0
20059 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20060 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20061 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
20062 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k3}
20063 ; AVX512DQBW-FAST-NEXT: vmovdqa 320(%rdx), %xmm0
20064 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20065 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20066 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
20067 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm11 {%k3}
20068 ; AVX512DQBW-FAST-NEXT: vmovdqa 384(%rdx), %xmm0
20069 ; AVX512DQBW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
20070 ; AVX512DQBW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
20071 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm25 {%k3}
20072 ; AVX512DQBW-FAST-NEXT: movb $112, %cl
20073 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k2
20074 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20075 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, (%rax), %zmm0, %zmm13 {%k2}
20076 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20077 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20078 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 64(%rax), %zmm0, %zmm1 {%k2}
20079 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20080 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20081 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 128(%rax), %zmm0, %zmm2 {%k2}
20082 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20083 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20084 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 192(%rax), %zmm0, %zmm5 {%k2}
20085 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20086 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 256(%rax), %zmm23, %zmm7 {%k2}
20087 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20088 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 320(%rax), %zmm27, %zmm11 {%k2}
20089 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, %zmm27
20090 ; AVX512DQBW-FAST-NEXT: vinserti64x2 $3, 384(%rax), %zmm10, %zmm25 {%k2}
20091 ; AVX512DQBW-FAST-NEXT: movb $56, %cl
20092 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k2
20093 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20094 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
20095 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20096 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20097 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
20098 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20099 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k2}
20100 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20101 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20102 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
20103 ; AVX512DQBW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
20104 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20105 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
20106 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, %zmm26
20107 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20108 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
20109 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, %zmm24 {%k2}
20110 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20111 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20112 ; AVX512DQBW-FAST-NEXT: movb $14, %cl
20113 ; AVX512DQBW-FAST-NEXT: kmovd %ecx, %k2
20114 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
20115 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm23 {%k2}
20116 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20117 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20118 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
20119 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm22 {%k2}
20120 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20121 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20122 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
20123 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm20 {%k2}
20124 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20125 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20126 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
20127 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm16 {%k2}
20128 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20129 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20130 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
20131 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm14 {%k2}
20132 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20133 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20134 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
20135 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm4 {%k2}
20136 ; AVX512DQBW-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
20137 ; AVX512DQBW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],mem[6,7]
20138 ; AVX512DQBW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm29 {%k2}
20139 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20140 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20141 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
20142 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20143 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20144 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm2 {%k1}
20145 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20146 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20147 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm3 {%k1}
20148 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20149 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
20150 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm5 {%k1}
20151 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20152 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
20153 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
20154 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20155 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
20156 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
20157 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20158 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, %zmm21 {%k1}
20159 ; AVX512DQBW-FAST-NEXT: movb $120, %al
20160 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
20161 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
20162 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, %zmm19 {%k1}
20163 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20164 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
20165 ; AVX512DQBW-FAST-NEXT: # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
20166 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
20167 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm15 {%k1}
20168 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
20169 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, %zmm13 {%k1}
20170 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
20171 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, %zmm3 {%k1}
20172 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
20173 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, %zmm1 {%k1}
20174 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20175 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, %zmm0 {%k1}
20176 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm21, %zmm18 {%k1}
20177 ; AVX512DQBW-FAST-NEXT: movb $-61, %al
20178 ; AVX512DQBW-FAST-NEXT: kmovd %eax, %k1
20179 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20180 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm17 {%k1}
20181 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20182 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm5 # 64-byte Folded Reload
20183 ; AVX512DQBW-FAST-NEXT: # zmm5 = zmm2[0,1,2,3],mem[4,5,6,7]
20184 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20185 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm5 {%k1}
20186 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20187 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm7 # 64-byte Folded Reload
20188 ; AVX512DQBW-FAST-NEXT: # zmm7 = zmm2[0,1,2,3],mem[4,5,6,7]
20189 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20190 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm7 {%k1}
20191 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20192 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm8 # 64-byte Folded Reload
20193 ; AVX512DQBW-FAST-NEXT: # zmm8 = zmm2[0,1,2,3],mem[4,5,6,7]
20194 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20195 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm8 {%k1}
20196 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20197 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm9 # 64-byte Folded Reload
20198 ; AVX512DQBW-FAST-NEXT: # zmm9 = zmm2[0,1,2,3],mem[4,5,6,7]
20199 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20200 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm9 {%k1}
20201 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20202 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm10 # 64-byte Folded Reload
20203 ; AVX512DQBW-FAST-NEXT: # zmm10 = zmm2[0,1,2,3],mem[4,5,6,7]
20204 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20205 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm10 {%k1}
20206 ; AVX512DQBW-FAST-NEXT: vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm30, %zmm11 # 64-byte Folded Reload
20207 ; AVX512DQBW-FAST-NEXT: # zmm11 = zmm30[0,1,2,3],mem[4,5,6,7]
20208 ; AVX512DQBW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20209 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm2, %zmm11 {%k1}
20210 ; AVX512DQBW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
20211 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm11, 3008(%rax)
20212 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm18, 2944(%rax)
20213 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm29, 2880(%rax)
20214 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20215 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 2816(%rax)
20216 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm24, 2752(%rax)
20217 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm25, 2688(%rax)
20218 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20219 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 2624(%rax)
20220 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm10, 2560(%rax)
20221 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm0, 2496(%rax)
20222 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm4, 2432(%rax)
20223 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20224 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 2368(%rax)
20225 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm28, 2304(%rax)
20226 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm27, 2240(%rax)
20227 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20228 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 2176(%rax)
20229 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm9, 2112(%rax)
20230 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm1, 2048(%rax)
20231 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm14, 1984(%rax)
20232 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20233 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 1920(%rax)
20234 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm26, 1856(%rax)
20235 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20236 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1792(%rax)
20237 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20238 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1728(%rax)
20239 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm8, 1664(%rax)
20240 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm3, 1600(%rax)
20241 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm16, 1536(%rax)
20242 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20243 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 1472(%rax)
20244 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20245 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1408(%rax)
20246 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20247 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1344(%rax)
20248 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20249 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 1280(%rax)
20250 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm7, 1216(%rax)
20251 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm13, 1152(%rax)
20252 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm20, 1088(%rax)
20253 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20254 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 1024(%rax)
20255 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20256 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 960(%rax)
20257 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20258 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 896(%rax)
20259 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20260 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 832(%rax)
20261 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm5, 768(%rax)
20262 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm15, 704(%rax)
20263 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm22, 640(%rax)
20264 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
20265 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm2, 576(%rax)
20266 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm31, 512(%rax)
20267 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20268 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 448(%rax)
20269 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20270 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 384(%rax)
20271 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm17, 320(%rax)
20272 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm19, 256(%rax)
20273 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm23, 192(%rax)
20274 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20275 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 128(%rax)
20276 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20277 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 64(%rax)
20278 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20279 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, (%rax)
20280 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm12, 3520(%rax)
20281 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20282 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3456(%rax)
20283 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20284 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3392(%rax)
20285 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20286 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3328(%rax)
20287 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20288 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3264(%rax)
20289 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20290 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3200(%rax)
20291 ; AVX512DQBW-FAST-NEXT: vmovdqa64 %zmm6, 3072(%rax)
20292 ; AVX512DQBW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
20293 ; AVX512DQBW-FAST-NEXT: vmovaps %zmm0, 3136(%rax)
20294 ; AVX512DQBW-FAST-NEXT: addq $6568, %rsp # imm = 0x19A8
20295 ; AVX512DQBW-FAST-NEXT: vzeroupper
20296 ; AVX512DQBW-FAST-NEXT: retq
20297 %in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
20298 %in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
20299 %in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
20300 %in.vec3 = load <64 x i64>, ptr %in.vecptr3, align 64
20301 %in.vec4 = load <64 x i64>, ptr %in.vecptr4, align 64
20302 %in.vec5 = load <64 x i64>, ptr %in.vecptr5, align 64
20303 %in.vec6 = load <64 x i64>, ptr %in.vecptr6, align 64
20304 %1 = shufflevector <64 x i64> %in.vec0, <64 x i64> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
20305 %2 = shufflevector <64 x i64> %in.vec2, <64 x i64> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
20306 %3 = shufflevector <64 x i64> %in.vec4, <64 x i64> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
20307 %4 = shufflevector <128 x i64> %1, <128 x i64> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
20308 %5 = shufflevector <64 x i64> %in.vec6, <64 x i64> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
20309 %6 = shufflevector <128 x i64> %3, <128 x i64> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
20310 %7 = shufflevector <192 x i64> %6, <192 x i64> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %8 = shufflevector <256 x i64> %4, <256 x i64> %7, <448 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447>
  %interleaved.vec = shufflevector <448 x i64> %8, <448 x i64> poison, <448 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447>
  store <448 x i64> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX2-FAST: {{.*}}
; AVX2-FAST-PERLANE: {{.*}}
; AVX2-SLOW: {{.*}}
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-FAST: {{.*}}
; AVX512BW-SLOW: {{.*}}
; AVX512F-FAST: {{.*}}
; AVX512F-SLOW: {{.*}}
; FALLBACK0: {{.*}}
; FALLBACK1: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}
; FALLBACK2: {{.*}}
; FALLBACK3: {{.*}}
; FALLBACK4: {{.*}}
; FALLBACK5: {{.*}}
; FALLBACK6: {{.*}}
; FALLBACK7: {{.*}}
; FALLBACK8: {{.*}}
; FALLBACK9: {{.*}}