1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP
16 ; These patterns are produced by LoopVectorizer for interleaved stores.
18 define void @store_i32_stride4_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i32_stride4_vf2:
21 ; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
22 ; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
23 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
24 ; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
25 ; SSE-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
26 ; SSE-NEXT: unpcklps {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
27 ; SSE-NEXT: movaps %xmm0, %xmm2
28 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
29 ; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm0[1],xmm1[1]
30 ; SSE-NEXT: movaps %xmm1, 16(%r8)
31 ; SSE-NEXT: movaps %xmm2, (%r8)
34 ; AVX-LABEL: store_i32_stride4_vf2:
36 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
37 ; AVX-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
38 ; AVX-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
39 ; AVX-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
40 ; AVX-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
41 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
42 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm2
43 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
44 ; AVX-NEXT: vshufps {{.*#+}} ymm0 = ymm0[1,3,0,2,5,7,4,6]
45 ; AVX-NEXT: vshufps {{.*#+}} ymm1 = ymm2[0,2,1,3,4,6,5,7]
46 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3,4,5],ymm1[6,7]
47 ; AVX-NEXT: vmovaps %ymm0, (%r8)
48 ; AVX-NEXT: vzeroupper
51 ; AVX2-LABEL: store_i32_stride4_vf2:
53 ; AVX2-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
54 ; AVX2-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
55 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
56 ; AVX2-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
57 ; AVX2-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
58 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
59 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
60 ; AVX2-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
61 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
62 ; AVX2-NEXT: vmovaps %ymm0, (%r8)
63 ; AVX2-NEXT: vzeroupper
66 ; AVX2-FP-LABEL: store_i32_stride4_vf2:
68 ; AVX2-FP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
69 ; AVX2-FP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
70 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
71 ; AVX2-FP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
72 ; AVX2-FP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
73 ; AVX2-FP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
74 ; AVX2-FP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
75 ; AVX2-FP-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
76 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
77 ; AVX2-FP-NEXT: vmovaps %ymm0, (%r8)
78 ; AVX2-FP-NEXT: vzeroupper
81 ; AVX2-FCP-LABEL: store_i32_stride4_vf2:
83 ; AVX2-FCP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
84 ; AVX2-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
85 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
86 ; AVX2-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
87 ; AVX2-FCP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
88 ; AVX2-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
89 ; AVX2-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
90 ; AVX2-FCP-NEXT: vmovaps {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
91 ; AVX2-FCP-NEXT: vpermps %ymm0, %ymm1, %ymm0
92 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%r8)
93 ; AVX2-FCP-NEXT: vzeroupper
96 ; AVX512-LABEL: store_i32_stride4_vf2:
98 ; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
99 ; AVX512-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
100 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
101 ; AVX512-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
102 ; AVX512-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
103 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
104 ; AVX512-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
105 ; AVX512-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
106 ; AVX512-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
107 ; AVX512-NEXT: vmovaps %ymm0, (%r8)
108 ; AVX512-NEXT: vzeroupper
111 ; AVX512-FCP-LABEL: store_i32_stride4_vf2:
112 ; AVX512-FCP: # %bb.0:
113 ; AVX512-FCP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
114 ; AVX512-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
115 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
116 ; AVX512-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
117 ; AVX512-FCP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
118 ; AVX512-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
119 ; AVX512-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
120 ; AVX512-FCP-NEXT: vmovaps {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
121 ; AVX512-FCP-NEXT: vpermps %ymm0, %ymm1, %ymm0
122 ; AVX512-FCP-NEXT: vmovaps %ymm0, (%r8)
123 ; AVX512-FCP-NEXT: vzeroupper
124 ; AVX512-FCP-NEXT: retq
126 ; AVX512DQ-LABEL: store_i32_stride4_vf2:
128 ; AVX512DQ-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
129 ; AVX512DQ-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
130 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
131 ; AVX512DQ-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
132 ; AVX512DQ-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
133 ; AVX512DQ-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
134 ; AVX512DQ-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
135 ; AVX512DQ-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
136 ; AVX512DQ-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
137 ; AVX512DQ-NEXT: vmovaps %ymm0, (%r8)
138 ; AVX512DQ-NEXT: vzeroupper
139 ; AVX512DQ-NEXT: retq
141 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf2:
142 ; AVX512DQ-FCP: # %bb.0:
143 ; AVX512DQ-FCP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
144 ; AVX512DQ-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
145 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
146 ; AVX512DQ-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
147 ; AVX512DQ-FCP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
148 ; AVX512DQ-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
149 ; AVX512DQ-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
150 ; AVX512DQ-FCP-NEXT: vmovaps {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
151 ; AVX512DQ-FCP-NEXT: vpermps %ymm0, %ymm1, %ymm0
152 ; AVX512DQ-FCP-NEXT: vmovaps %ymm0, (%r8)
153 ; AVX512DQ-FCP-NEXT: vzeroupper
154 ; AVX512DQ-FCP-NEXT: retq
156 ; AVX512BW-LABEL: store_i32_stride4_vf2:
158 ; AVX512BW-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
159 ; AVX512BW-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
160 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
161 ; AVX512BW-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
162 ; AVX512BW-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
163 ; AVX512BW-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
164 ; AVX512BW-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
165 ; AVX512BW-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
166 ; AVX512BW-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
167 ; AVX512BW-NEXT: vmovaps %ymm0, (%r8)
168 ; AVX512BW-NEXT: vzeroupper
169 ; AVX512BW-NEXT: retq
171 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf2:
172 ; AVX512BW-FCP: # %bb.0:
173 ; AVX512BW-FCP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
174 ; AVX512BW-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
175 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
176 ; AVX512BW-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
177 ; AVX512BW-FCP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
178 ; AVX512BW-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
179 ; AVX512BW-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
180 ; AVX512BW-FCP-NEXT: vmovaps {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
181 ; AVX512BW-FCP-NEXT: vpermps %ymm0, %ymm1, %ymm0
182 ; AVX512BW-FCP-NEXT: vmovaps %ymm0, (%r8)
183 ; AVX512BW-FCP-NEXT: vzeroupper
184 ; AVX512BW-FCP-NEXT: retq
186 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf2:
187 ; AVX512DQ-BW: # %bb.0:
188 ; AVX512DQ-BW-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
189 ; AVX512DQ-BW-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
190 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
191 ; AVX512DQ-BW-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
192 ; AVX512DQ-BW-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
193 ; AVX512DQ-BW-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
194 ; AVX512DQ-BW-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
195 ; AVX512DQ-BW-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,5,7]
196 ; AVX512DQ-BW-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
197 ; AVX512DQ-BW-NEXT: vmovaps %ymm0, (%r8)
198 ; AVX512DQ-BW-NEXT: vzeroupper
199 ; AVX512DQ-BW-NEXT: retq
201 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf2:
202 ; AVX512DQ-BW-FCP: # %bb.0:
203 ; AVX512DQ-BW-FCP-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
204 ; AVX512DQ-BW-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
205 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
206 ; AVX512DQ-BW-FCP-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
207 ; AVX512DQ-BW-FCP-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
208 ; AVX512DQ-BW-FCP-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
209 ; AVX512DQ-BW-FCP-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
210 ; AVX512DQ-BW-FCP-NEXT: vmovaps {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
211 ; AVX512DQ-BW-FCP-NEXT: vpermps %ymm0, %ymm1, %ymm0
212 ; AVX512DQ-BW-FCP-NEXT: vmovaps %ymm0, (%r8)
213 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
214 ; AVX512DQ-BW-FCP-NEXT: retq
215 %in.vec0 = load <2 x i32>, ptr %in.vecptr0, align 64
216 %in.vec1 = load <2 x i32>, ptr %in.vecptr1, align 64
217 %in.vec2 = load <2 x i32>, ptr %in.vecptr2, align 64
218 %in.vec3 = load <2 x i32>, ptr %in.vecptr3, align 64
219 %1 = shufflevector <2 x i32> %in.vec0, <2 x i32> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
220 %2 = shufflevector <2 x i32> %in.vec2, <2 x i32> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
221 %3 = shufflevector <4 x i32> %1, <4 x i32> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
222 %interleaved.vec = shufflevector <8 x i32> %3, <8 x i32> poison, <8 x i32> <i32 0, i32 2, i32 4, i32 6, i32 1, i32 3, i32 5, i32 7>
223 store <8 x i32> %interleaved.vec, ptr %out.vec, align 64
227 define void @store_i32_stride4_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
228 ; SSE-LABEL: store_i32_stride4_vf4:
230 ; SSE-NEXT: movaps (%rdi), %xmm0
231 ; SSE-NEXT: movaps (%rsi), %xmm1
232 ; SSE-NEXT: movaps (%rdx), %xmm2
233 ; SSE-NEXT: movaps (%rcx), %xmm3
234 ; SSE-NEXT: movaps %xmm2, %xmm4
235 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
236 ; SSE-NEXT: movaps %xmm0, %xmm5
237 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
238 ; SSE-NEXT: movaps %xmm5, %xmm6
239 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm4[0]
240 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm4[1]
241 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
242 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
243 ; SSE-NEXT: movaps %xmm0, %xmm1
244 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
245 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
246 ; SSE-NEXT: movaps %xmm0, 32(%r8)
247 ; SSE-NEXT: movaps %xmm1, 48(%r8)
248 ; SSE-NEXT: movaps %xmm5, 16(%r8)
249 ; SSE-NEXT: movaps %xmm6, (%r8)
252 ; AVX-LABEL: store_i32_stride4_vf4:
254 ; AVX-NEXT: vmovaps (%rdi), %xmm0
255 ; AVX-NEXT: vmovaps (%rsi), %xmm1
256 ; AVX-NEXT: vmovaps (%rdx), %xmm2
257 ; AVX-NEXT: vmovaps (%rcx), %xmm3
258 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm4
259 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm5
260 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
261 ; AVX-NEXT: vshufps {{.*#+}} ymm3 = ymm2[0,1,1,0,4,5,5,4]
262 ; AVX-NEXT: vmovddup {{.*#+}} ymm6 = ymm5[0,0,2,2]
263 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3],ymm6[4,5],ymm3[6],ymm6[7]
264 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
265 ; AVX-NEXT: vshufps {{.*#+}} ymm1 = ymm0[1,0,2,3,5,4,6,7]
266 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm4[0],ymm1[1],ymm4[2,3],ymm1[4],ymm4[5,6,7]
267 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm3[2,3],ymm1[4,5],ymm3[6,7]
268 ; AVX-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,1,3,2,4,5,7,6]
269 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2],ymm2[3],ymm5[4,5],ymm2[6],ymm5[7]
270 ; AVX-NEXT: vshufps {{.*#+}} ymm0 = ymm0[3,2,2,3,7,6,6,7]
271 ; AVX-NEXT: vshufpd {{.*#+}} ymm3 = ymm4[1,0,3,2]
272 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7]
273 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
274 ; AVX-NEXT: vmovaps %ymm0, 32(%r8)
275 ; AVX-NEXT: vmovaps %ymm1, (%r8)
276 ; AVX-NEXT: vzeroupper
279 ; AVX2-LABEL: store_i32_stride4_vf4:
281 ; AVX2-NEXT: vmovaps (%rdi), %xmm0
282 ; AVX2-NEXT: vmovaps (%rdx), %xmm1
283 ; AVX2-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
284 ; AVX2-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
285 ; AVX2-NEXT: vmovaps {{.*#+}} ymm2 = [u,u,0,4,u,u,1,5]
286 ; AVX2-NEXT: vpermps %ymm1, %ymm2, %ymm2
287 ; AVX2-NEXT: vmovaps {{.*#+}} ymm3 = [0,4,u,u,1,5,u,u]
288 ; AVX2-NEXT: vpermps %ymm0, %ymm3, %ymm3
289 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
290 ; AVX2-NEXT: vmovaps {{.*#+}} ymm3 = [u,u,2,6,u,u,3,7]
291 ; AVX2-NEXT: vpermps %ymm1, %ymm3, %ymm1
292 ; AVX2-NEXT: vmovaps {{.*#+}} ymm3 = [2,6,u,u,3,7,u,u]
293 ; AVX2-NEXT: vpermps %ymm0, %ymm3, %ymm0
294 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
295 ; AVX2-NEXT: vmovaps %ymm0, 32(%r8)
296 ; AVX2-NEXT: vmovaps %ymm2, (%r8)
297 ; AVX2-NEXT: vzeroupper
300 ; AVX2-FP-LABEL: store_i32_stride4_vf4:
302 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm0
303 ; AVX2-FP-NEXT: vmovaps (%rdx), %xmm1
304 ; AVX2-FP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
305 ; AVX2-FP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
306 ; AVX2-FP-NEXT: vmovaps {{.*#+}} ymm2 = [u,u,0,4,u,u,1,5]
307 ; AVX2-FP-NEXT: vpermps %ymm1, %ymm2, %ymm2
308 ; AVX2-FP-NEXT: vmovaps {{.*#+}} ymm3 = [0,4,u,u,1,5,u,u]
309 ; AVX2-FP-NEXT: vpermps %ymm0, %ymm3, %ymm3
310 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
311 ; AVX2-FP-NEXT: vmovaps {{.*#+}} ymm3 = [u,u,2,6,u,u,3,7]
312 ; AVX2-FP-NEXT: vpermps %ymm1, %ymm3, %ymm1
313 ; AVX2-FP-NEXT: vmovaps {{.*#+}} ymm3 = [2,6,u,u,3,7,u,u]
314 ; AVX2-FP-NEXT: vpermps %ymm0, %ymm3, %ymm0
315 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
316 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%r8)
317 ; AVX2-FP-NEXT: vmovaps %ymm2, (%r8)
318 ; AVX2-FP-NEXT: vzeroupper
321 ; AVX2-FCP-LABEL: store_i32_stride4_vf4:
323 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm0
324 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm1
325 ; AVX2-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
326 ; AVX2-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
327 ; AVX2-FCP-NEXT: vmovaps {{.*#+}} ymm2 = [u,u,0,4,u,u,1,5]
328 ; AVX2-FCP-NEXT: vpermps %ymm1, %ymm2, %ymm2
329 ; AVX2-FCP-NEXT: vmovaps {{.*#+}} ymm3 = [0,4,u,u,1,5,u,u]
330 ; AVX2-FCP-NEXT: vpermps %ymm0, %ymm3, %ymm3
331 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
332 ; AVX2-FCP-NEXT: vmovaps {{.*#+}} ymm3 = [u,u,2,6,u,u,3,7]
333 ; AVX2-FCP-NEXT: vpermps %ymm1, %ymm3, %ymm1
334 ; AVX2-FCP-NEXT: vmovaps {{.*#+}} ymm3 = [2,6,u,u,3,7,u,u]
335 ; AVX2-FCP-NEXT: vpermps %ymm0, %ymm3, %ymm0
336 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
337 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%r8)
338 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
339 ; AVX2-FCP-NEXT: vzeroupper
340 ; AVX2-FCP-NEXT: retq
342 ; AVX512-LABEL: store_i32_stride4_vf4:
344 ; AVX512-NEXT: vmovaps (%rdi), %xmm0
345 ; AVX512-NEXT: vmovaps (%rdx), %xmm1
346 ; AVX512-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
347 ; AVX512-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
348 ; AVX512-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
349 ; AVX512-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
350 ; AVX512-NEXT: vpermps %zmm0, %zmm1, %zmm0
351 ; AVX512-NEXT: vmovaps %zmm0, (%r8)
352 ; AVX512-NEXT: vzeroupper
355 ; AVX512-FCP-LABEL: store_i32_stride4_vf4:
356 ; AVX512-FCP: # %bb.0:
357 ; AVX512-FCP-NEXT: vmovaps (%rdi), %xmm0
358 ; AVX512-FCP-NEXT: vmovaps (%rdx), %xmm1
359 ; AVX512-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
360 ; AVX512-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
361 ; AVX512-FCP-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
362 ; AVX512-FCP-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
363 ; AVX512-FCP-NEXT: vpermps %zmm0, %zmm1, %zmm0
364 ; AVX512-FCP-NEXT: vmovaps %zmm0, (%r8)
365 ; AVX512-FCP-NEXT: vzeroupper
366 ; AVX512-FCP-NEXT: retq
368 ; AVX512DQ-LABEL: store_i32_stride4_vf4:
370 ; AVX512DQ-NEXT: vmovaps (%rdi), %xmm0
371 ; AVX512DQ-NEXT: vmovaps (%rdx), %xmm1
372 ; AVX512DQ-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
373 ; AVX512DQ-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
374 ; AVX512DQ-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
375 ; AVX512DQ-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
376 ; AVX512DQ-NEXT: vpermps %zmm0, %zmm1, %zmm0
377 ; AVX512DQ-NEXT: vmovaps %zmm0, (%r8)
378 ; AVX512DQ-NEXT: vzeroupper
379 ; AVX512DQ-NEXT: retq
381 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf4:
382 ; AVX512DQ-FCP: # %bb.0:
383 ; AVX512DQ-FCP-NEXT: vmovaps (%rdi), %xmm0
384 ; AVX512DQ-FCP-NEXT: vmovaps (%rdx), %xmm1
385 ; AVX512DQ-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
386 ; AVX512DQ-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
387 ; AVX512DQ-FCP-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
388 ; AVX512DQ-FCP-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
389 ; AVX512DQ-FCP-NEXT: vpermps %zmm0, %zmm1, %zmm0
390 ; AVX512DQ-FCP-NEXT: vmovaps %zmm0, (%r8)
391 ; AVX512DQ-FCP-NEXT: vzeroupper
392 ; AVX512DQ-FCP-NEXT: retq
394 ; AVX512BW-LABEL: store_i32_stride4_vf4:
396 ; AVX512BW-NEXT: vmovaps (%rdi), %xmm0
397 ; AVX512BW-NEXT: vmovaps (%rdx), %xmm1
398 ; AVX512BW-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
399 ; AVX512BW-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
400 ; AVX512BW-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
401 ; AVX512BW-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
402 ; AVX512BW-NEXT: vpermps %zmm0, %zmm1, %zmm0
403 ; AVX512BW-NEXT: vmovaps %zmm0, (%r8)
404 ; AVX512BW-NEXT: vzeroupper
405 ; AVX512BW-NEXT: retq
407 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf4:
408 ; AVX512BW-FCP: # %bb.0:
409 ; AVX512BW-FCP-NEXT: vmovaps (%rdi), %xmm0
410 ; AVX512BW-FCP-NEXT: vmovaps (%rdx), %xmm1
411 ; AVX512BW-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
412 ; AVX512BW-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
413 ; AVX512BW-FCP-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
414 ; AVX512BW-FCP-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
415 ; AVX512BW-FCP-NEXT: vpermps %zmm0, %zmm1, %zmm0
416 ; AVX512BW-FCP-NEXT: vmovaps %zmm0, (%r8)
417 ; AVX512BW-FCP-NEXT: vzeroupper
418 ; AVX512BW-FCP-NEXT: retq
420 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf4:
421 ; AVX512DQ-BW: # %bb.0:
422 ; AVX512DQ-BW-NEXT: vmovaps (%rdi), %xmm0
423 ; AVX512DQ-BW-NEXT: vmovaps (%rdx), %xmm1
424 ; AVX512DQ-BW-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
425 ; AVX512DQ-BW-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
426 ; AVX512DQ-BW-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
427 ; AVX512DQ-BW-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
428 ; AVX512DQ-BW-NEXT: vpermps %zmm0, %zmm1, %zmm0
429 ; AVX512DQ-BW-NEXT: vmovaps %zmm0, (%r8)
430 ; AVX512DQ-BW-NEXT: vzeroupper
431 ; AVX512DQ-BW-NEXT: retq
433 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf4:
434 ; AVX512DQ-BW-FCP: # %bb.0:
435 ; AVX512DQ-BW-FCP-NEXT: vmovaps (%rdi), %xmm0
436 ; AVX512DQ-BW-FCP-NEXT: vmovaps (%rdx), %xmm1
437 ; AVX512DQ-BW-FCP-NEXT: vinsertf128 $1, (%rcx), %ymm1, %ymm1
438 ; AVX512DQ-BW-FCP-NEXT: vinsertf128 $1, (%rsi), %ymm0, %ymm0
439 ; AVX512DQ-BW-FCP-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
440 ; AVX512DQ-BW-FCP-NEXT: vmovaps {{.*#+}} zmm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
441 ; AVX512DQ-BW-FCP-NEXT: vpermps %zmm0, %zmm1, %zmm0
442 ; AVX512DQ-BW-FCP-NEXT: vmovaps %zmm0, (%r8)
443 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
444 ; AVX512DQ-BW-FCP-NEXT: retq
445 %in.vec0 = load <4 x i32>, ptr %in.vecptr0, align 64
446 %in.vec1 = load <4 x i32>, ptr %in.vecptr1, align 64
447 %in.vec2 = load <4 x i32>, ptr %in.vecptr2, align 64
448 %in.vec3 = load <4 x i32>, ptr %in.vecptr3, align 64
449 %1 = shufflevector <4 x i32> %in.vec0, <4 x i32> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
450 %2 = shufflevector <4 x i32> %in.vec2, <4 x i32> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
451 %3 = shufflevector <8 x i32> %1, <8 x i32> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
452 %interleaved.vec = shufflevector <16 x i32> %3, <16 x i32> poison, <16 x i32> <i32 0, i32 4, i32 8, i32 12, i32 1, i32 5, i32 9, i32 13, i32 2, i32 6, i32 10, i32 14, i32 3, i32 7, i32 11, i32 15>
453 store <16 x i32> %interleaved.vec, ptr %out.vec, align 64
457 define void @store_i32_stride4_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
458 ; SSE-LABEL: store_i32_stride4_vf8:
460 ; SSE-NEXT: movaps (%rdi), %xmm0
461 ; SSE-NEXT: movaps 16(%rdi), %xmm1
462 ; SSE-NEXT: movaps (%rsi), %xmm5
463 ; SSE-NEXT: movaps 16(%rsi), %xmm6
464 ; SSE-NEXT: movaps (%rdx), %xmm7
465 ; SSE-NEXT: movaps 16(%rdx), %xmm4
466 ; SSE-NEXT: movaps (%rcx), %xmm8
467 ; SSE-NEXT: movaps 16(%rcx), %xmm9
468 ; SSE-NEXT: movaps %xmm7, %xmm10
469 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1]
470 ; SSE-NEXT: movaps %xmm0, %xmm2
471 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
472 ; SSE-NEXT: movaps %xmm2, %xmm3
473 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm10[1]
474 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm10[0]
475 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm8[2],xmm7[3],xmm8[3]
476 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm5[2],xmm0[3],xmm5[3]
477 ; SSE-NEXT: movaps %xmm0, %xmm5
478 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm7[1]
479 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm7[0]
480 ; SSE-NEXT: movaps %xmm4, %xmm7
481 ; SSE-NEXT: unpcklps {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
482 ; SSE-NEXT: movaps %xmm1, %xmm8
483 ; SSE-NEXT: unpcklps {{.*#+}} xmm8 = xmm8[0],xmm6[0],xmm8[1],xmm6[1]
484 ; SSE-NEXT: movaps %xmm8, %xmm10
485 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm7[1]
486 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm7[0]
487 ; SSE-NEXT: unpckhps {{.*#+}} xmm4 = xmm4[2],xmm9[2],xmm4[3],xmm9[3]
488 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
489 ; SSE-NEXT: movaps %xmm1, %xmm6
490 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm4[1]
491 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm4[0]
492 ; SSE-NEXT: movaps %xmm1, 96(%r8)
493 ; SSE-NEXT: movaps %xmm6, 112(%r8)
494 ; SSE-NEXT: movaps %xmm8, 64(%r8)
495 ; SSE-NEXT: movaps %xmm10, 80(%r8)
496 ; SSE-NEXT: movaps %xmm0, 32(%r8)
497 ; SSE-NEXT: movaps %xmm5, 48(%r8)
498 ; SSE-NEXT: movaps %xmm2, (%r8)
499 ; SSE-NEXT: movaps %xmm3, 16(%r8)
502 ; AVX-LABEL: store_i32_stride4_vf8:
504 ; AVX-NEXT: vmovaps (%rdi), %xmm2
505 ; AVX-NEXT: vmovaps 16(%rdi), %xmm0
506 ; AVX-NEXT: vmovaps (%rsi), %xmm4
507 ; AVX-NEXT: vmovaps 16(%rsi), %xmm1
508 ; AVX-NEXT: vinsertps {{.*#+}} xmm3 = xmm2[1],xmm4[1],zero,zero
509 ; AVX-NEXT: vunpcklps {{.*#+}} xmm5 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
510 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
511 ; AVX-NEXT: vmovaps (%rcx), %xmm5
512 ; AVX-NEXT: vmovaps 16(%rcx), %xmm6
513 ; AVX-NEXT: vmovaps (%rdx), %xmm7
514 ; AVX-NEXT: vmovaps 16(%rdx), %xmm8
515 ; AVX-NEXT: vunpcklps {{.*#+}} xmm9 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
516 ; AVX-NEXT: vmovlhps {{.*#+}} xmm10 = xmm5[0],xmm7[0]
517 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm10[0,1,2,0]
518 ; AVX-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
519 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm9[2,3],ymm3[4,5],ymm9[6,7]
520 ; AVX-NEXT: vinsertps {{.*#+}} xmm9 = xmm0[1],xmm1[1],zero,zero
521 ; AVX-NEXT: vunpcklps {{.*#+}} xmm10 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
522 ; AVX-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
523 ; AVX-NEXT: vunpcklps {{.*#+}} xmm10 = xmm8[0],xmm6[0],xmm8[1],xmm6[1]
524 ; AVX-NEXT: vmovlhps {{.*#+}} xmm11 = xmm6[0],xmm8[0]
525 ; AVX-NEXT: vshufps {{.*#+}} xmm11 = xmm11[0,1,2,0]
526 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
527 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1],ymm10[2,3],ymm9[4,5],ymm10[6,7]
528 ; AVX-NEXT: vunpckhps {{.*#+}} xmm10 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
529 ; AVX-NEXT: vinsertps {{.*#+}} xmm5 = zero,zero,xmm7[2],xmm5[2]
530 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm5, %ymm5
531 ; AVX-NEXT: vunpckhps {{.*#+}} xmm7 = xmm2[2],xmm4[2],xmm2[3],xmm4[3]
532 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm4[3,0],xmm2[3,0]
533 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm2[2,0,2,3]
534 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm7, %ymm2
535 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm5[2,3],ymm2[4,5],ymm5[6,7]
536 ; AVX-NEXT: vunpckhps {{.*#+}} xmm4 = xmm8[2],xmm6[2],xmm8[3],xmm6[3]
537 ; AVX-NEXT: vinsertps {{.*#+}} xmm5 = zero,zero,xmm8[2],xmm6[2]
538 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
539 ; AVX-NEXT: vunpckhps {{.*#+}} xmm5 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
540 ; AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm1[3,0],xmm0[3,0]
541 ; AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,0,2,3]
542 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm5, %ymm0
543 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm4[2,3],ymm0[4,5],ymm4[6,7]
544 ; AVX-NEXT: vmovaps %ymm0, 96(%r8)
545 ; AVX-NEXT: vmovaps %ymm2, 32(%r8)
546 ; AVX-NEXT: vmovaps %ymm9, 64(%r8)
547 ; AVX-NEXT: vmovaps %ymm3, (%r8)
548 ; AVX-NEXT: vzeroupper
551 ; AVX2-LABEL: store_i32_stride4_vf8:
553 ; AVX2-NEXT: vmovaps (%rdi), %ymm0
554 ; AVX2-NEXT: vmovaps (%rsi), %ymm1
555 ; AVX2-NEXT: vmovaps (%rdx), %ymm2
556 ; AVX2-NEXT: vmovaps (%rcx), %ymm3
557 ; AVX2-NEXT: vmovaps (%rcx), %xmm4
558 ; AVX2-NEXT: vmovaps (%rdx), %xmm5
559 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
560 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
561 ; AVX2-NEXT: vmovaps (%rsi), %xmm7
562 ; AVX2-NEXT: vmovaps (%rdi), %xmm8
563 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
564 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
565 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
566 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
567 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
568 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
569 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
570 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
571 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm5 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
572 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,2,3]
573 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm7 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
574 ; AVX2-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[2,1,3,3]
575 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3],ymm7[4,5],ymm5[6,7]
576 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
577 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
578 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
579 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
580 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
581 ; AVX2-NEXT: vmovaps %ymm0, 64(%r8)
582 ; AVX2-NEXT: vmovaps %ymm5, 96(%r8)
583 ; AVX2-NEXT: vmovaps %ymm4, 32(%r8)
584 ; AVX2-NEXT: vmovaps %ymm6, (%r8)
585 ; AVX2-NEXT: vzeroupper
588 ; AVX2-FP-LABEL: store_i32_stride4_vf8:
590 ; AVX2-FP-NEXT: vmovaps (%rdi), %ymm0
591 ; AVX2-FP-NEXT: vmovaps (%rsi), %ymm1
592 ; AVX2-FP-NEXT: vmovaps (%rdx), %ymm2
593 ; AVX2-FP-NEXT: vmovaps (%rcx), %ymm3
594 ; AVX2-FP-NEXT: vmovaps (%rcx), %xmm4
595 ; AVX2-FP-NEXT: vmovaps (%rdx), %xmm5
596 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
597 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
598 ; AVX2-FP-NEXT: vmovaps (%rsi), %xmm7
599 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm8
600 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
601 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
602 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
603 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
604 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
605 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
606 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
607 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
608 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm5 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
609 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,2,3]
610 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm7 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
611 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[2,1,3,3]
612 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3],ymm7[4,5],ymm5[6,7]
613 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
614 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
615 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
616 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
617 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
618 ; AVX2-FP-NEXT: vmovaps %ymm0, 64(%r8)
619 ; AVX2-FP-NEXT: vmovaps %ymm5, 96(%r8)
620 ; AVX2-FP-NEXT: vmovaps %ymm4, 32(%r8)
621 ; AVX2-FP-NEXT: vmovaps %ymm6, (%r8)
622 ; AVX2-FP-NEXT: vzeroupper
625 ; AVX2-FCP-LABEL: store_i32_stride4_vf8:
627 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm0
628 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm1
629 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm2
630 ; AVX2-FCP-NEXT: vmovaps (%rcx), %ymm3
631 ; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm4
632 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm5
633 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
634 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
635 ; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm7
636 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm8
637 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
638 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
639 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
640 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
641 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
642 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
643 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
644 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
645 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm5 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
646 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,2,2,3]
647 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm7 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
648 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[2,1,3,3]
649 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3],ymm7[4,5],ymm5[6,7]
650 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
651 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
652 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
653 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
654 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
655 ; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%r8)
656 ; AVX2-FCP-NEXT: vmovaps %ymm5, 96(%r8)
657 ; AVX2-FCP-NEXT: vmovaps %ymm4, 32(%r8)
658 ; AVX2-FCP-NEXT: vmovaps %ymm6, (%r8)
659 ; AVX2-FCP-NEXT: vzeroupper
660 ; AVX2-FCP-NEXT: retq
662 ; AVX512-LABEL: store_i32_stride4_vf8:
664 ; AVX512-NEXT: vmovdqa (%rdi), %ymm0
665 ; AVX512-NEXT: vmovdqa (%rdx), %ymm1
666 ; AVX512-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
667 ; AVX512-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
668 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
669 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
670 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
671 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
672 ; AVX512-NEXT: vmovdqa64 %zmm3, 64(%r8)
673 ; AVX512-NEXT: vmovdqa64 %zmm2, (%r8)
674 ; AVX512-NEXT: vzeroupper
677 ; AVX512-FCP-LABEL: store_i32_stride4_vf8:
678 ; AVX512-FCP: # %bb.0:
679 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm0
680 ; AVX512-FCP-NEXT: vmovdqa (%rdx), %ymm1
681 ; AVX512-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
682 ; AVX512-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
683 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
684 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
685 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
686 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
687 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 64(%r8)
688 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, (%r8)
689 ; AVX512-FCP-NEXT: vzeroupper
690 ; AVX512-FCP-NEXT: retq
692 ; AVX512DQ-LABEL: store_i32_stride4_vf8:
694 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm0
695 ; AVX512DQ-NEXT: vmovdqa (%rdx), %ymm1
696 ; AVX512DQ-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
697 ; AVX512DQ-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
698 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
699 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
700 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
701 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
702 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 64(%r8)
703 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, (%r8)
704 ; AVX512DQ-NEXT: vzeroupper
705 ; AVX512DQ-NEXT: retq
707 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf8:
708 ; AVX512DQ-FCP: # %bb.0:
709 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm0
710 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %ymm1
711 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
712 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
713 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
714 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
715 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
716 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
717 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 64(%r8)
718 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, (%r8)
719 ; AVX512DQ-FCP-NEXT: vzeroupper
720 ; AVX512DQ-FCP-NEXT: retq
722 ; AVX512BW-LABEL: store_i32_stride4_vf8:
724 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
725 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
726 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
727 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
728 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
729 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
730 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
731 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
732 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 64(%r8)
733 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%r8)
734 ; AVX512BW-NEXT: vzeroupper
735 ; AVX512BW-NEXT: retq
737 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf8:
738 ; AVX512BW-FCP: # %bb.0:
739 ; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm0
740 ; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %ymm1
741 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
742 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
743 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
744 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
745 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
746 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
747 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%r8)
748 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%r8)
749 ; AVX512BW-FCP-NEXT: vzeroupper
750 ; AVX512BW-FCP-NEXT: retq
752 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf8:
753 ; AVX512DQ-BW: # %bb.0:
754 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm0
755 ; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %ymm1
756 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
757 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
758 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
759 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
760 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
761 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
762 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 64(%r8)
763 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%r8)
764 ; AVX512DQ-BW-NEXT: vzeroupper
765 ; AVX512DQ-BW-NEXT: retq
767 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf8:
768 ; AVX512DQ-BW-FCP: # %bb.0:
769 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm0
770 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %ymm1
771 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
772 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
773 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27]
774 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
775 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm3 = [4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
776 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
777 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 64(%r8)
778 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%r8)
779 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
780 ; AVX512DQ-BW-FCP-NEXT: retq
781 %in.vec0 = load <8 x i32>, ptr %in.vecptr0, align 64
782 %in.vec1 = load <8 x i32>, ptr %in.vecptr1, align 64
783 %in.vec2 = load <8 x i32>, ptr %in.vecptr2, align 64
784 %in.vec3 = load <8 x i32>, ptr %in.vecptr3, align 64
785 %1 = shufflevector <8 x i32> %in.vec0, <8 x i32> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
786 %2 = shufflevector <8 x i32> %in.vec2, <8 x i32> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
787 %3 = shufflevector <16 x i32> %1, <16 x i32> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
788 %interleaved.vec = shufflevector <32 x i32> %3, <32 x i32> poison, <32 x i32> <i32 0, i32 8, i32 16, i32 24, i32 1, i32 9, i32 17, i32 25, i32 2, i32 10, i32 18, i32 26, i32 3, i32 11, i32 19, i32 27, i32 4, i32 12, i32 20, i32 28, i32 5, i32 13, i32 21, i32 29, i32 6, i32 14, i32 22, i32 30, i32 7, i32 15, i32 23, i32 31>
789 store <32 x i32> %interleaved.vec, ptr %out.vec, align 64
793 define void @store_i32_stride4_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
794 ; SSE-LABEL: store_i32_stride4_vf16:
796 ; SSE-NEXT: movaps (%rdi), %xmm5
797 ; SSE-NEXT: movaps 16(%rdi), %xmm11
798 ; SSE-NEXT: movaps 32(%rdi), %xmm4
799 ; SSE-NEXT: movaps 48(%rdi), %xmm2
800 ; SSE-NEXT: movaps (%rsi), %xmm0
801 ; SSE-NEXT: movaps 16(%rsi), %xmm3
802 ; SSE-NEXT: movaps 32(%rsi), %xmm9
803 ; SSE-NEXT: movaps (%rdx), %xmm7
804 ; SSE-NEXT: movaps 16(%rdx), %xmm13
805 ; SSE-NEXT: movaps 32(%rdx), %xmm10
806 ; SSE-NEXT: movaps (%rcx), %xmm8
807 ; SSE-NEXT: movaps 16(%rcx), %xmm14
808 ; SSE-NEXT: movaps 32(%rcx), %xmm12
809 ; SSE-NEXT: movaps %xmm7, %xmm15
810 ; SSE-NEXT: unpcklps {{.*#+}} xmm15 = xmm15[0],xmm8[0],xmm15[1],xmm8[1]
811 ; SSE-NEXT: movaps %xmm5, %xmm6
812 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1]
813 ; SSE-NEXT: movaps %xmm6, %xmm1
814 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm15[1]
815 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
816 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm15[0]
817 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm8[2],xmm7[3],xmm8[3]
818 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm0[2],xmm5[3],xmm0[3]
819 ; SSE-NEXT: movaps %xmm5, %xmm0
820 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm7[1]
821 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
822 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm7[0]
823 ; SSE-NEXT: movaps %xmm13, %xmm15
824 ; SSE-NEXT: unpcklps {{.*#+}} xmm15 = xmm15[0],xmm14[0],xmm15[1],xmm14[1]
825 ; SSE-NEXT: movaps %xmm11, %xmm7
826 ; SSE-NEXT: unpcklps {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
827 ; SSE-NEXT: movaps %xmm7, %xmm0
828 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm15[1]
829 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
830 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm15[0]
831 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm14[2],xmm13[3],xmm14[3]
832 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm3[2],xmm11[3],xmm3[3]
833 ; SSE-NEXT: movaps %xmm11, %xmm8
834 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm13[1]
835 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm13[0]
836 ; SSE-NEXT: movaps %xmm10, %xmm15
837 ; SSE-NEXT: unpcklps {{.*#+}} xmm15 = xmm15[0],xmm12[0],xmm15[1],xmm12[1]
838 ; SSE-NEXT: movaps %xmm4, %xmm13
839 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm9[0],xmm13[1],xmm9[1]
840 ; SSE-NEXT: movaps %xmm13, %xmm14
841 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm15[1]
842 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm15[0]
843 ; SSE-NEXT: movaps 48(%rdx), %xmm15
844 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm12[2],xmm10[3],xmm12[3]
845 ; SSE-NEXT: movaps 48(%rcx), %xmm12
846 ; SSE-NEXT: unpckhps {{.*#+}} xmm4 = xmm4[2],xmm9[2],xmm4[3],xmm9[3]
847 ; SSE-NEXT: movaps %xmm4, %xmm9
848 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm10[1]
849 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm10[0]
850 ; SSE-NEXT: movaps %xmm15, %xmm10
851 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm12[0],xmm10[1],xmm12[1]
852 ; SSE-NEXT: movaps 48(%rsi), %xmm1
853 ; SSE-NEXT: movaps %xmm2, %xmm3
854 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
855 ; SSE-NEXT: movaps %xmm3, %xmm0
856 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
857 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm10[0]
858 ; SSE-NEXT: unpckhps {{.*#+}} xmm15 = xmm15[2],xmm12[2],xmm15[3],xmm12[3]
859 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
860 ; SSE-NEXT: movaps %xmm2, %xmm1
861 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm15[1]
862 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm15[0]
863 ; SSE-NEXT: movaps %xmm2, 224(%r8)
864 ; SSE-NEXT: movaps %xmm1, 240(%r8)
865 ; SSE-NEXT: movaps %xmm3, 192(%r8)
866 ; SSE-NEXT: movaps %xmm0, 208(%r8)
867 ; SSE-NEXT: movaps %xmm4, 160(%r8)
868 ; SSE-NEXT: movaps %xmm9, 176(%r8)
869 ; SSE-NEXT: movaps %xmm13, 128(%r8)
870 ; SSE-NEXT: movaps %xmm14, 144(%r8)
871 ; SSE-NEXT: movaps %xmm11, 96(%r8)
872 ; SSE-NEXT: movaps %xmm8, 112(%r8)
873 ; SSE-NEXT: movaps %xmm7, 64(%r8)
874 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
875 ; SSE-NEXT: movaps %xmm0, 80(%r8)
876 ; SSE-NEXT: movaps %xmm5, 32(%r8)
877 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
878 ; SSE-NEXT: movaps %xmm0, 48(%r8)
879 ; SSE-NEXT: movaps %xmm6, (%r8)
880 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
881 ; SSE-NEXT: movaps %xmm0, 16(%r8)
884 ; AVX-LABEL: store_i32_stride4_vf16:
886 ; AVX-NEXT: subq $24, %rsp
887 ; AVX-NEXT: vmovaps (%rdi), %xmm7
888 ; AVX-NEXT: vmovaps 32(%rdi), %xmm3
889 ; AVX-NEXT: vmovaps 48(%rdi), %xmm10
890 ; AVX-NEXT: vmovaps (%rsi), %xmm9
891 ; AVX-NEXT: vmovaps 32(%rsi), %xmm4
892 ; AVX-NEXT: vmovaps 48(%rsi), %xmm5
893 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm7[1],xmm9[1],zero,zero
894 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
895 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
896 ; AVX-NEXT: vmovaps (%rcx), %xmm14
897 ; AVX-NEXT: vmovaps 32(%rcx), %xmm6
898 ; AVX-NEXT: vmovaps 48(%rcx), %xmm11
899 ; AVX-NEXT: vmovaps (%rdx), %xmm2
900 ; AVX-NEXT: vmovaps 32(%rdx), %xmm8
901 ; AVX-NEXT: vmovaps 48(%rdx), %xmm12
902 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm14[0],xmm2[1],xmm14[1]
903 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm14[0],xmm2[0]
904 ; AVX-NEXT: vshufps {{.*#+}} xmm13 = xmm13[0,1,2,0]
905 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm13, %ymm1
906 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
907 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
908 ; AVX-NEXT: vmovaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
909 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm10[1],xmm5[1],zero,zero
910 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm10[0],xmm5[0],xmm10[1],xmm5[1]
911 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
912 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
913 ; AVX-NEXT: vmovlhps {{.*#+}} xmm13 = xmm11[0],xmm12[0]
914 ; AVX-NEXT: vshufps {{.*#+}} xmm13 = xmm13[0,1,2,0]
915 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm13, %ymm1
916 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
917 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
918 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
919 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
920 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm3[1],xmm4[1],zero,zero
921 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
922 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
923 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm6[0],xmm8[0]
924 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
925 ; AVX-NEXT: vunpcklps {{.*#+}} xmm13 = xmm8[0],xmm6[0],xmm8[1],xmm6[1]
926 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm1, %ymm1
927 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
928 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
929 ; AVX-NEXT: vmovaps 16(%rdi), %xmm13
930 ; AVX-NEXT: vmovaps 16(%rsi), %xmm15
931 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm13[1],xmm15[1],zero,zero
932 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm13[0],xmm15[0],xmm13[1],xmm15[1]
933 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm3
934 ; AVX-NEXT: vmovaps 16(%rcx), %xmm1
935 ; AVX-NEXT: vmovaps 16(%rdx), %xmm0
936 ; AVX-NEXT: vmovlhps {{.*#+}} xmm10 = xmm1[0],xmm0[0]
937 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm10[0,1,2,0]
938 ; AVX-NEXT: vunpcklps {{.*#+}} xmm4 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
939 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm10, %ymm4
940 ; AVX-NEXT: vblendps {{.*#+}} ymm10 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
941 ; AVX-NEXT: vunpckhps {{.*#+}} xmm3 = xmm2[2],xmm14[2],xmm2[3],xmm14[3]
942 ; AVX-NEXT: vinsertps {{.*#+}} xmm2 = zero,zero,xmm2[2],xmm14[2]
943 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
944 ; AVX-NEXT: vunpckhps {{.*#+}} xmm3 = xmm7[2],xmm9[2],xmm7[3],xmm9[3]
945 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm9[3,0],xmm7[3,0]
946 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm4[2,0,2,3]
947 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
948 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
949 ; AVX-NEXT: vunpckhps {{.*#+}} xmm3 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
950 ; AVX-NEXT: vinsertps {{.*#+}} xmm4 = zero,zero,xmm12[2],xmm11[2]
951 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
952 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
953 ; AVX-NEXT: vunpckhps {{.*#+}} xmm4 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
954 ; AVX-NEXT: vshufps {{.*#+}} xmm5 = xmm5[3,0],xmm7[3,0]
955 ; AVX-NEXT: vshufps {{.*#+}} xmm5 = xmm5[2,0,2,3]
956 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
957 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,3],ymm4[4,5],ymm3[6,7]
958 ; AVX-NEXT: vunpckhps {{.*#+}} xmm4 = xmm8[2],xmm6[2],xmm8[3],xmm6[3]
959 ; AVX-NEXT: vinsertps {{.*#+}} xmm5 = zero,zero,xmm8[2],xmm6[2]
960 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
961 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
962 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
963 ; AVX-NEXT: vunpckhps {{.*#+}} xmm5 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
964 ; AVX-NEXT: vshufps {{.*#+}} xmm6 = xmm6[3,0],xmm7[3,0]
965 ; AVX-NEXT: vshufps {{.*#+}} xmm6 = xmm6[2,0,2,3]
966 ; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
967 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
968 ; AVX-NEXT: vunpckhps {{.*#+}} xmm5 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
969 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,zero,xmm0[2],xmm1[2]
970 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm0
971 ; AVX-NEXT: vunpckhps {{.*#+}} xmm1 = xmm13[2],xmm15[2],xmm13[3],xmm15[3]
972 ; AVX-NEXT: vshufps {{.*#+}} xmm5 = xmm15[3,0],xmm13[3,0]
973 ; AVX-NEXT: vshufps {{.*#+}} xmm5 = xmm5[2,0,2,3]
974 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
975 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
976 ; AVX-NEXT: vmovaps %ymm0, 96(%r8)
977 ; AVX-NEXT: vmovaps %ymm4, 160(%r8)
978 ; AVX-NEXT: vmovaps %ymm3, 224(%r8)
979 ; AVX-NEXT: vmovaps %ymm2, 32(%r8)
980 ; AVX-NEXT: vmovaps %ymm10, 64(%r8)
981 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
982 ; AVX-NEXT: vmovaps %ymm0, 128(%r8)
983 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
984 ; AVX-NEXT: vmovaps %ymm0, 192(%r8)
985 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
986 ; AVX-NEXT: vmovaps %ymm0, (%r8)
987 ; AVX-NEXT: addq $24, %rsp
988 ; AVX-NEXT: vzeroupper
991 ; AVX2-LABEL: store_i32_stride4_vf16:
993 ; AVX2-NEXT: vmovaps (%rdi), %ymm0
994 ; AVX2-NEXT: vmovaps 32(%rdi), %ymm2
995 ; AVX2-NEXT: vmovaps (%rsi), %ymm1
996 ; AVX2-NEXT: vmovaps 32(%rsi), %ymm4
997 ; AVX2-NEXT: vmovaps (%rcx), %xmm5
998 ; AVX2-NEXT: vmovaps 32(%rcx), %xmm7
999 ; AVX2-NEXT: vmovaps (%rdx), %xmm6
1000 ; AVX2-NEXT: vmovaps 32(%rdx), %xmm8
1001 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm3 = xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1002 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
1003 ; AVX2-NEXT: vmovaps (%rsi), %xmm9
1004 ; AVX2-NEXT: vmovaps (%rdi), %xmm10
1005 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm11 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1006 ; AVX2-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,1,1,3]
1007 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm11[0,1],ymm3[2,3],ymm11[4,5],ymm3[6,7]
1008 ; AVX2-NEXT: vmovaps 32(%rsi), %xmm11
1009 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
1010 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
1011 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
1012 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
1013 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
1014 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
1015 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
1016 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
1017 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1018 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
1019 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
1020 ; AVX2-NEXT: vmovaps 32(%rdx), %ymm9
1021 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm7 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1022 ; AVX2-NEXT: vmovaps 32(%rcx), %ymm8
1023 ; AVX2-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,0,2,1]
1024 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm10 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1025 ; AVX2-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
1026 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1],ymm7[2,3],ymm10[4,5],ymm7[6,7]
1027 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[4],ymm8[4],ymm9[5],ymm8[5]
1028 ; AVX2-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,2,2,3]
1029 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm11 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
1030 ; AVX2-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[2,1,3,3]
1031 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm11[0,1],ymm10[2,3],ymm11[4,5],ymm10[6,7]
1032 ; AVX2-NEXT: vmovaps (%rdx), %ymm11
1033 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm8 = ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[6],ymm8[6],ymm9[7],ymm8[7]
1034 ; AVX2-NEXT: vmovaps (%rcx), %ymm9
1035 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,2,2,3]
1036 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
1037 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
1038 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm8[2,3],ymm2[4,5],ymm8[6,7]
1039 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm4 = ymm11[0],ymm9[0],ymm11[1],ymm9[1],ymm11[4],ymm9[4],ymm11[5],ymm9[5]
1040 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
1041 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
1042 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[2,1,3,3]
1043 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm8[0,1],ymm4[2,3],ymm8[4,5],ymm4[6,7]
1044 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm8 = ymm11[2],ymm9[2],ymm11[3],ymm9[3],ymm11[6],ymm9[6],ymm11[7],ymm9[7]
1045 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
1046 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm8[0,2,2,3]
1047 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
1048 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1049 ; AVX2-NEXT: vmovaps %ymm0, 96(%r8)
1050 ; AVX2-NEXT: vmovaps %ymm4, 64(%r8)
1051 ; AVX2-NEXT: vmovaps %ymm2, 224(%r8)
1052 ; AVX2-NEXT: vmovaps %ymm10, 192(%r8)
1053 ; AVX2-NEXT: vmovaps %ymm7, 160(%r8)
1054 ; AVX2-NEXT: vmovaps %ymm6, 128(%r8)
1055 ; AVX2-NEXT: vmovaps %ymm5, (%r8)
1056 ; AVX2-NEXT: vmovaps %ymm3, 32(%r8)
1057 ; AVX2-NEXT: vzeroupper
1060 ; AVX2-FP-LABEL: store_i32_stride4_vf16:
1062 ; AVX2-FP-NEXT: vmovaps (%rdi), %ymm0
1063 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm2
1064 ; AVX2-FP-NEXT: vmovaps (%rsi), %ymm1
1065 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm4
1066 ; AVX2-FP-NEXT: vmovaps (%rcx), %xmm5
1067 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm7
1068 ; AVX2-FP-NEXT: vmovaps (%rdx), %xmm6
1069 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm8
1070 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm3 = xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1071 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
1072 ; AVX2-FP-NEXT: vmovaps (%rsi), %xmm9
1073 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm10
1074 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm11 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1075 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,1,1,3]
1076 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm11[0,1],ymm3[2,3],ymm11[4,5],ymm3[6,7]
1077 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm11
1078 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
1079 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
1080 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
1081 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
1082 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
1083 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
1084 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
1085 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
1086 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1087 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
1088 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
1089 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm9
1090 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm7 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1091 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %ymm8
1092 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,0,2,1]
1093 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm10 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1094 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
1095 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1],ymm7[2,3],ymm10[4,5],ymm7[6,7]
1096 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[4],ymm8[4],ymm9[5],ymm8[5]
1097 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,2,2,3]
1098 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm11 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
1099 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[2,1,3,3]
1100 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm11[0,1],ymm10[2,3],ymm11[4,5],ymm10[6,7]
1101 ; AVX2-FP-NEXT: vmovaps (%rdx), %ymm11
1102 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm8 = ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[6],ymm8[6],ymm9[7],ymm8[7]
1103 ; AVX2-FP-NEXT: vmovaps (%rcx), %ymm9
1104 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,2,2,3]
1105 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
1106 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
1107 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm8[2,3],ymm2[4,5],ymm8[6,7]
1108 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm4 = ymm11[0],ymm9[0],ymm11[1],ymm9[1],ymm11[4],ymm9[4],ymm11[5],ymm9[5]
1109 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
1110 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
1111 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[2,1,3,3]
1112 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm8[0,1],ymm4[2,3],ymm8[4,5],ymm4[6,7]
1113 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm8 = ymm11[2],ymm9[2],ymm11[3],ymm9[3],ymm11[6],ymm9[6],ymm11[7],ymm9[7]
1114 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
1115 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm8[0,2,2,3]
1116 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
1117 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1118 ; AVX2-FP-NEXT: vmovaps %ymm0, 96(%r8)
1119 ; AVX2-FP-NEXT: vmovaps %ymm4, 64(%r8)
1120 ; AVX2-FP-NEXT: vmovaps %ymm2, 224(%r8)
1121 ; AVX2-FP-NEXT: vmovaps %ymm10, 192(%r8)
1122 ; AVX2-FP-NEXT: vmovaps %ymm7, 160(%r8)
1123 ; AVX2-FP-NEXT: vmovaps %ymm6, 128(%r8)
1124 ; AVX2-FP-NEXT: vmovaps %ymm5, (%r8)
1125 ; AVX2-FP-NEXT: vmovaps %ymm3, 32(%r8)
1126 ; AVX2-FP-NEXT: vzeroupper
1127 ; AVX2-FP-NEXT: retq
1129 ; AVX2-FCP-LABEL: store_i32_stride4_vf16:
1130 ; AVX2-FCP: # %bb.0:
1131 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm0
1132 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm2
1133 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm1
1134 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm4
1135 ; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm5
1136 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm7
1137 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm6
1138 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm8
1139 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm3 = xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1140 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
1141 ; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm9
1142 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm10
1143 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm11 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1144 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,1,1,3]
1145 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm11[0,1],ymm3[2,3],ymm11[4,5],ymm3[6,7]
1146 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm11
1147 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
1148 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
1149 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
1150 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
1151 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
1152 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
1153 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
1154 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,0,2,1]
1155 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1156 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,1,1,3]
1157 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
1158 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm9
1159 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm7 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1160 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %ymm8
1161 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,0,2,1]
1162 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm10 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1163 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
1164 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1],ymm7[2,3],ymm10[4,5],ymm7[6,7]
1165 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[4],ymm8[4],ymm9[5],ymm8[5]
1166 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,2,2,3]
1167 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm11 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
1168 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[2,1,3,3]
1169 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm11[0,1],ymm10[2,3],ymm11[4,5],ymm10[6,7]
1170 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm11
1171 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm8 = ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[6],ymm8[6],ymm9[7],ymm8[7]
1172 ; AVX2-FCP-NEXT: vmovaps (%rcx), %ymm9
1173 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,2,2,3]
1174 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
1175 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
1176 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1],ymm8[2,3],ymm2[4,5],ymm8[6,7]
1177 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm4 = ymm11[0],ymm9[0],ymm11[1],ymm9[1],ymm11[4],ymm9[4],ymm11[5],ymm9[5]
1178 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
1179 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
1180 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[2,1,3,3]
1181 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm8[0,1],ymm4[2,3],ymm8[4,5],ymm4[6,7]
1182 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm8 = ymm11[2],ymm9[2],ymm11[3],ymm9[3],ymm11[6],ymm9[6],ymm11[7],ymm9[7]
1183 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
1184 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm8[0,2,2,3]
1185 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
1186 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1187 ; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%r8)
1188 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%r8)
1189 ; AVX2-FCP-NEXT: vmovaps %ymm2, 224(%r8)
1190 ; AVX2-FCP-NEXT: vmovaps %ymm10, 192(%r8)
1191 ; AVX2-FCP-NEXT: vmovaps %ymm7, 160(%r8)
1192 ; AVX2-FCP-NEXT: vmovaps %ymm6, 128(%r8)
1193 ; AVX2-FCP-NEXT: vmovaps %ymm5, (%r8)
1194 ; AVX2-FCP-NEXT: vmovaps %ymm3, 32(%r8)
1195 ; AVX2-FCP-NEXT: vzeroupper
1196 ; AVX2-FCP-NEXT: retq
1198 ; AVX512-LABEL: store_i32_stride4_vf16:
1200 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
1201 ; AVX512-NEXT: vmovdqa64 (%rsi), %zmm1
1202 ; AVX512-NEXT: vmovdqa64 (%rdx), %zmm2
1203 ; AVX512-NEXT: vmovdqa64 (%rcx), %zmm3
1204 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1205 ; AVX512-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1206 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1207 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1208 ; AVX512-NEXT: movb $-86, %al
1209 ; AVX512-NEXT: kmovw %eax, %k1
1210 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1211 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1212 ; AVX512-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1213 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1214 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1215 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1216 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1217 ; AVX512-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1218 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1219 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1220 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1221 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1222 ; AVX512-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1223 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1224 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1225 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1226 ; AVX512-NEXT: vmovdqa64 %zmm2, 192(%r8)
1227 ; AVX512-NEXT: vmovdqa64 %zmm7, 128(%r8)
1228 ; AVX512-NEXT: vmovdqa64 %zmm6, 64(%r8)
1229 ; AVX512-NEXT: vmovdqa64 %zmm5, (%r8)
1230 ; AVX512-NEXT: vzeroupper
1233 ; AVX512-FCP-LABEL: store_i32_stride4_vf16:
1234 ; AVX512-FCP: # %bb.0:
1235 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1236 ; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm1
1237 ; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
1238 ; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm3
1239 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1240 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1241 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1242 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1243 ; AVX512-FCP-NEXT: movb $-86, %al
1244 ; AVX512-FCP-NEXT: kmovw %eax, %k1
1245 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1246 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1247 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1248 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1249 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1250 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1251 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1252 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1253 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1254 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1255 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1256 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1257 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1258 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1259 ; AVX512-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1260 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1261 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 192(%r8)
1262 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, 128(%r8)
1263 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 64(%r8)
1264 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, (%r8)
1265 ; AVX512-FCP-NEXT: vzeroupper
1266 ; AVX512-FCP-NEXT: retq
1268 ; AVX512DQ-LABEL: store_i32_stride4_vf16:
1269 ; AVX512DQ: # %bb.0:
1270 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm0
1271 ; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm1
1272 ; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm2
1273 ; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm3
1274 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1275 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1276 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1277 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1278 ; AVX512DQ-NEXT: movb $-86, %al
1279 ; AVX512DQ-NEXT: kmovw %eax, %k1
1280 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1281 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1282 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1283 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1284 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1285 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1286 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1287 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1288 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1289 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1290 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1291 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1292 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1293 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1294 ; AVX512DQ-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1295 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1296 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%r8)
1297 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 128(%r8)
1298 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%r8)
1299 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, (%r8)
1300 ; AVX512DQ-NEXT: vzeroupper
1301 ; AVX512DQ-NEXT: retq
1303 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf16:
1304 ; AVX512DQ-FCP: # %bb.0:
1305 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1306 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm1
1307 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
1308 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm3
1309 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1310 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1311 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1312 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1313 ; AVX512DQ-FCP-NEXT: movb $-86, %al
1314 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
1315 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1316 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1317 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1318 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1319 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1320 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1321 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1322 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1323 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1324 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1325 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1326 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1327 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1328 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1329 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1330 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1331 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 192(%r8)
1332 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, 128(%r8)
1333 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 64(%r8)
1334 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, (%r8)
1335 ; AVX512DQ-FCP-NEXT: vzeroupper
1336 ; AVX512DQ-FCP-NEXT: retq
1338 ; AVX512BW-LABEL: store_i32_stride4_vf16:
1339 ; AVX512BW: # %bb.0:
1340 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1341 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm1
1342 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
1343 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm3
1344 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1345 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1346 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1347 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1348 ; AVX512BW-NEXT: movb $-86, %al
1349 ; AVX512BW-NEXT: kmovd %eax, %k1
1350 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1351 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1352 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1353 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1354 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1355 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1356 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1357 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1358 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1359 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1360 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1361 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1362 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1363 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1364 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1365 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1366 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%r8)
1367 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%r8)
1368 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%r8)
1369 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r8)
1370 ; AVX512BW-NEXT: vzeroupper
1371 ; AVX512BW-NEXT: retq
1373 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf16:
1374 ; AVX512BW-FCP: # %bb.0:
1375 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1376 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm1
1377 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
1378 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm3
1379 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1380 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1381 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1382 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1383 ; AVX512BW-FCP-NEXT: movb $-86, %al
1384 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
1385 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1386 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1387 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1388 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1389 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1390 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1391 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1392 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1393 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1394 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1395 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1396 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1397 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1398 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1399 ; AVX512BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1400 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1401 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 192(%r8)
1402 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r8)
1403 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%r8)
1404 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%r8)
1405 ; AVX512BW-FCP-NEXT: vzeroupper
1406 ; AVX512BW-FCP-NEXT: retq
1408 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf16:
1409 ; AVX512DQ-BW: # %bb.0:
1410 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
1411 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm1
1412 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm2
1413 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm3
1414 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1415 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1416 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1417 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1418 ; AVX512DQ-BW-NEXT: movb $-86, %al
1419 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
1420 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1421 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1422 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1423 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1424 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1425 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1426 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1427 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1428 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1429 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1430 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1431 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1432 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1433 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1434 ; AVX512DQ-BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1435 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1436 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 192(%r8)
1437 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 128(%r8)
1438 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 64(%r8)
1439 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%r8)
1440 ; AVX512DQ-BW-NEXT: vzeroupper
1441 ; AVX512DQ-BW-NEXT: retq
1443 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf16:
1444 ; AVX512DQ-BW-FCP: # %bb.0:
1445 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1446 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm1
1447 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm2
1448 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm3
1449 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
1450 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1451 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm5 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
1452 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
1453 ; AVX512DQ-BW-FCP-NEXT: movb $-86, %al
1454 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
1455 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1456 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
1457 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1458 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
1459 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
1460 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1461 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
1462 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1463 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
1464 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
1465 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1466 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm4 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
1467 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
1468 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm2 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
1469 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1470 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1471 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 192(%r8)
1472 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, 128(%r8)
1473 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%r8)
1474 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%r8)
1475 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
1476 ; AVX512DQ-BW-FCP-NEXT: retq
1477 %in.vec0 = load <16 x i32>, ptr %in.vecptr0, align 64
1478 %in.vec1 = load <16 x i32>, ptr %in.vecptr1, align 64
1479 %in.vec2 = load <16 x i32>, ptr %in.vecptr2, align 64
1480 %in.vec3 = load <16 x i32>, ptr %in.vecptr3, align 64
1481 %1 = shufflevector <16 x i32> %in.vec0, <16 x i32> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1482 %2 = shufflevector <16 x i32> %in.vec2, <16 x i32> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1483 %3 = shufflevector <32 x i32> %1, <32 x i32> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1484 %interleaved.vec = shufflevector <64 x i32> %3, <64 x i32> poison, <64 x i32> <i32 0, i32 16, i32 32, i32 48, i32 1, i32 17, i32 33, i32 49, i32 2, i32 18, i32 34, i32 50, i32 3, i32 19, i32 35, i32 51, i32 4, i32 20, i32 36, i32 52, i32 5, i32 21, i32 37, i32 53, i32 6, i32 22, i32 38, i32 54, i32 7, i32 23, i32 39, i32 55, i32 8, i32 24, i32 40, i32 56, i32 9, i32 25, i32 41, i32 57, i32 10, i32 26, i32 42, i32 58, i32 11, i32 27, i32 43, i32 59, i32 12, i32 28, i32 44, i32 60, i32 13, i32 29, i32 45, i32 61, i32 14, i32 30, i32 46, i32 62, i32 15, i32 31, i32 47, i32 63>
1485 store <64 x i32> %interleaved.vec, ptr %out.vec, align 64
1489 define void @store_i32_stride4_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
1490 ; SSE-LABEL: store_i32_stride4_vf32:
1492 ; SSE-NEXT: subq $184, %rsp
1493 ; SSE-NEXT: movaps (%rdi), %xmm10
1494 ; SSE-NEXT: movaps 16(%rdi), %xmm11
1495 ; SSE-NEXT: movaps 32(%rdi), %xmm12
1496 ; SSE-NEXT: movaps 48(%rdi), %xmm13
1497 ; SSE-NEXT: movaps (%rsi), %xmm5
1498 ; SSE-NEXT: movaps 16(%rsi), %xmm2
1499 ; SSE-NEXT: movaps 32(%rsi), %xmm0
1500 ; SSE-NEXT: movaps (%rdx), %xmm6
1501 ; SSE-NEXT: movaps 16(%rdx), %xmm4
1502 ; SSE-NEXT: movaps 32(%rdx), %xmm1
1503 ; SSE-NEXT: movaps (%rcx), %xmm7
1504 ; SSE-NEXT: movaps 16(%rcx), %xmm8
1505 ; SSE-NEXT: movaps 32(%rcx), %xmm3
1506 ; SSE-NEXT: movaps %xmm6, %xmm9
1507 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm7[0],xmm9[1],xmm7[1]
1508 ; SSE-NEXT: movaps %xmm10, %xmm14
1509 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm5[0],xmm14[1],xmm5[1]
1510 ; SSE-NEXT: movaps %xmm14, %xmm15
1511 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm9[0]
1512 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1513 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm9[1]
1514 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1515 ; SSE-NEXT: unpckhps {{.*#+}} xmm6 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1516 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm5[2],xmm10[3],xmm5[3]
1517 ; SSE-NEXT: movaps %xmm10, %xmm5
1518 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm6[0]
1519 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1520 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm6[1]
1521 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1522 ; SSE-NEXT: movaps %xmm4, %xmm5
1523 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm8[0],xmm5[1],xmm8[1]
1524 ; SSE-NEXT: movaps %xmm11, %xmm6
1525 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1]
1526 ; SSE-NEXT: movaps %xmm6, %xmm7
1527 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm5[0]
1528 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1529 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm5[1]
1530 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1531 ; SSE-NEXT: unpckhps {{.*#+}} xmm4 = xmm4[2],xmm8[2],xmm4[3],xmm8[3]
1532 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm2[2],xmm11[3],xmm2[3]
1533 ; SSE-NEXT: movaps %xmm11, %xmm2
1534 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm4[0]
1535 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1536 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm4[1]
1537 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1538 ; SSE-NEXT: movaps %xmm1, %xmm2
1539 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1540 ; SSE-NEXT: movaps %xmm12, %xmm4
1541 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
1542 ; SSE-NEXT: movaps %xmm4, %xmm5
1543 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
1544 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1545 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
1546 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1547 ; SSE-NEXT: movaps 48(%rdx), %xmm2
1548 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
1549 ; SSE-NEXT: movaps 48(%rcx), %xmm3
1550 ; SSE-NEXT: unpckhps {{.*#+}} xmm12 = xmm12[2],xmm0[2],xmm12[3],xmm0[3]
1551 ; SSE-NEXT: movaps %xmm12, %xmm0
1552 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
1553 ; SSE-NEXT: movaps %xmm0, (%rsp) # 16-byte Spill
1554 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm1[1]
1555 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1556 ; SSE-NEXT: movaps %xmm2, %xmm0
1557 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
1558 ; SSE-NEXT: movaps 48(%rsi), %xmm1
1559 ; SSE-NEXT: movaps %xmm13, %xmm4
1560 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
1561 ; SSE-NEXT: movaps %xmm4, %xmm5
1562 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm0[0]
1563 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1564 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
1565 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1566 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1567 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm1[2],xmm13[3],xmm1[3]
1568 ; SSE-NEXT: movaps %xmm13, %xmm0
1569 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
1570 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1571 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm2[1]
1572 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1573 ; SSE-NEXT: movaps 64(%rdx), %xmm0
1574 ; SSE-NEXT: movaps 64(%rcx), %xmm1
1575 ; SSE-NEXT: movaps %xmm0, %xmm2
1576 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1577 ; SSE-NEXT: movaps 64(%rdi), %xmm13
1578 ; SSE-NEXT: movaps 64(%rsi), %xmm3
1579 ; SSE-NEXT: movaps %xmm13, %xmm14
1580 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm3[0],xmm14[1],xmm3[1]
1581 ; SSE-NEXT: movaps %xmm14, %xmm4
1582 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
1583 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1584 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm2[1]
1585 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1586 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm3[2],xmm13[3],xmm3[3]
1587 ; SSE-NEXT: movaps %xmm13, %xmm1
1588 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1589 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1590 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
1591 ; SSE-NEXT: movaps 80(%rdx), %xmm0
1592 ; SSE-NEXT: movaps 80(%rcx), %xmm1
1593 ; SSE-NEXT: movaps %xmm0, %xmm2
1594 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1595 ; SSE-NEXT: movaps 80(%rdi), %xmm11
1596 ; SSE-NEXT: movaps 80(%rsi), %xmm7
1597 ; SSE-NEXT: movaps %xmm11, %xmm8
1598 ; SSE-NEXT: unpcklps {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
1599 ; SSE-NEXT: movaps %xmm8, %xmm3
1600 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
1601 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1602 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm2[1]
1603 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1604 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm7[2],xmm11[3],xmm7[3]
1605 ; SSE-NEXT: movaps %xmm11, %xmm15
1606 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm0[0]
1607 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm0[1]
1608 ; SSE-NEXT: movaps 96(%rdx), %xmm1
1609 ; SSE-NEXT: movaps 96(%rcx), %xmm6
1610 ; SSE-NEXT: movaps %xmm1, %xmm0
1611 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
1612 ; SSE-NEXT: movaps 96(%rdi), %xmm5
1613 ; SSE-NEXT: movaps 96(%rsi), %xmm4
1614 ; SSE-NEXT: movaps %xmm5, %xmm9
1615 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm4[0],xmm9[1],xmm4[1]
1616 ; SSE-NEXT: movaps %xmm9, %xmm12
1617 ; SSE-NEXT: movlhps {{.*#+}} xmm12 = xmm12[0],xmm0[0]
1618 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
1619 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1620 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1621 ; SSE-NEXT: movaps %xmm5, %xmm10
1622 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm1[0]
1623 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm1[1]
1624 ; SSE-NEXT: movaps 112(%rdx), %xmm2
1625 ; SSE-NEXT: movaps 112(%rcx), %xmm7
1626 ; SSE-NEXT: movaps %xmm2, %xmm6
1627 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
1628 ; SSE-NEXT: movaps 112(%rdi), %xmm0
1629 ; SSE-NEXT: movaps 112(%rsi), %xmm4
1630 ; SSE-NEXT: movaps %xmm0, %xmm1
1631 ; SSE-NEXT: unpcklps {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
1632 ; SSE-NEXT: movaps %xmm1, %xmm3
1633 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
1634 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
1635 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm7[2],xmm2[3],xmm7[3]
1636 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
1637 ; SSE-NEXT: movaps %xmm0, %xmm4
1638 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
1639 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
1640 ; SSE-NEXT: movaps %xmm0, 496(%r8)
1641 ; SSE-NEXT: movaps %xmm4, 480(%r8)
1642 ; SSE-NEXT: movaps %xmm1, 464(%r8)
1643 ; SSE-NEXT: movaps %xmm3, 448(%r8)
1644 ; SSE-NEXT: movaps %xmm5, 432(%r8)
1645 ; SSE-NEXT: movaps %xmm10, 416(%r8)
1646 ; SSE-NEXT: movaps %xmm9, 400(%r8)
1647 ; SSE-NEXT: movaps %xmm12, 384(%r8)
1648 ; SSE-NEXT: movaps %xmm11, 368(%r8)
1649 ; SSE-NEXT: movaps %xmm15, 352(%r8)
1650 ; SSE-NEXT: movaps %xmm8, 336(%r8)
1651 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1652 ; SSE-NEXT: movaps %xmm0, 320(%r8)
1653 ; SSE-NEXT: movaps %xmm13, 304(%r8)
1654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1655 ; SSE-NEXT: movaps %xmm0, 288(%r8)
1656 ; SSE-NEXT: movaps %xmm14, 272(%r8)
1657 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1658 ; SSE-NEXT: movaps %xmm0, 256(%r8)
1659 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1660 ; SSE-NEXT: movaps %xmm0, 240(%r8)
1661 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1662 ; SSE-NEXT: movaps %xmm0, 224(%r8)
1663 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1664 ; SSE-NEXT: movaps %xmm0, 208(%r8)
1665 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1666 ; SSE-NEXT: movaps %xmm0, 192(%r8)
1667 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1668 ; SSE-NEXT: movaps %xmm0, 176(%r8)
1669 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1670 ; SSE-NEXT: movaps %xmm0, 160(%r8)
1671 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1672 ; SSE-NEXT: movaps %xmm0, 144(%r8)
1673 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1674 ; SSE-NEXT: movaps %xmm0, 128(%r8)
1675 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1676 ; SSE-NEXT: movaps %xmm0, 112(%r8)
1677 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1678 ; SSE-NEXT: movaps %xmm0, 96(%r8)
1679 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1680 ; SSE-NEXT: movaps %xmm0, 80(%r8)
1681 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1682 ; SSE-NEXT: movaps %xmm0, 64(%r8)
1683 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1684 ; SSE-NEXT: movaps %xmm0, 48(%r8)
1685 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1686 ; SSE-NEXT: movaps %xmm0, 32(%r8)
1687 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1688 ; SSE-NEXT: movaps %xmm0, 16(%r8)
1689 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1690 ; SSE-NEXT: movaps %xmm0, (%r8)
1691 ; SSE-NEXT: addq $184, %rsp
1694 ; AVX-LABEL: store_i32_stride4_vf32:
1696 ; AVX-NEXT: subq $488, %rsp # imm = 0x1E8
1697 ; AVX-NEXT: vmovaps 16(%rdi), %xmm2
1698 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1699 ; AVX-NEXT: vmovaps 16(%rsi), %xmm1
1700 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1701 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1702 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1703 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1704 ; AVX-NEXT: vmovaps 16(%rcx), %xmm3
1705 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1706 ; AVX-NEXT: vmovaps 16(%rdx), %xmm2
1707 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1708 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1709 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
1710 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm2[0,1,2,0]
1711 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1712 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1713 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1714 ; AVX-NEXT: vmovaps 64(%rdi), %xmm2
1715 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1716 ; AVX-NEXT: vmovaps 64(%rsi), %xmm1
1717 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1718 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1719 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1720 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1721 ; AVX-NEXT: vmovaps 64(%rcx), %xmm3
1722 ; AVX-NEXT: vmovaps %xmm3, (%rsp) # 16-byte Spill
1723 ; AVX-NEXT: vmovaps 64(%rdx), %xmm2
1724 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1725 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1726 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
1727 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm2[0,1,2,0]
1728 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1729 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1730 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1731 ; AVX-NEXT: vmovaps 80(%rdi), %xmm2
1732 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1733 ; AVX-NEXT: vmovaps 80(%rsi), %xmm1
1734 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1735 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1736 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1737 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1738 ; AVX-NEXT: vmovaps 80(%rcx), %xmm3
1739 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1740 ; AVX-NEXT: vmovaps 80(%rdx), %xmm2
1741 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1742 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
1743 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
1744 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1745 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1746 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1747 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1748 ; AVX-NEXT: vmovaps 32(%rdi), %xmm2
1749 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1750 ; AVX-NEXT: vmovaps 32(%rsi), %xmm1
1751 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1752 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1753 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1754 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1755 ; AVX-NEXT: vmovaps 32(%rcx), %xmm2
1756 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1757 ; AVX-NEXT: vmovaps 32(%rdx), %xmm13
1758 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm13[0]
1759 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
1760 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm13[0],xmm2[0],xmm13[1],xmm2[1]
1761 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1762 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1763 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1764 ; AVX-NEXT: vmovaps 48(%rdi), %xmm2
1765 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1766 ; AVX-NEXT: vmovaps 48(%rsi), %xmm1
1767 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1768 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1769 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1770 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1771 ; AVX-NEXT: vmovaps 48(%rcx), %xmm10
1772 ; AVX-NEXT: vmovaps 48(%rdx), %xmm9
1773 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm10[0],xmm9[0]
1774 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
1775 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
1776 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1777 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1778 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1779 ; AVX-NEXT: vmovaps 96(%rdi), %xmm2
1780 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1781 ; AVX-NEXT: vmovaps 96(%rsi), %xmm1
1782 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1783 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
1784 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1785 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1786 ; AVX-NEXT: vmovaps 96(%rcx), %xmm7
1787 ; AVX-NEXT: vmovaps 96(%rdx), %xmm5
1788 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm5[0]
1789 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
1790 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm5[0],xmm7[0],xmm5[1],xmm7[1]
1791 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1792 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1793 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1794 ; AVX-NEXT: vmovaps 112(%rdi), %xmm1
1795 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1796 ; AVX-NEXT: vmovaps 112(%rsi), %xmm12
1797 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm1[1],xmm12[1],zero,zero
1798 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1]
1799 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1800 ; AVX-NEXT: vmovaps 112(%rcx), %xmm4
1801 ; AVX-NEXT: vmovaps 112(%rdx), %xmm3
1802 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm4[0],xmm3[0]
1803 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
1804 ; AVX-NEXT: vunpcklps {{.*#+}} xmm6 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
1805 ; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm1
1806 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1807 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1808 ; AVX-NEXT: vmovaps (%rdi), %xmm11
1809 ; AVX-NEXT: vmovaps (%rsi), %xmm8
1810 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm11[1],xmm8[1],zero,zero
1811 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm11[0],xmm8[0],xmm11[1],xmm8[1]
1812 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1813 ; AVX-NEXT: vmovaps (%rcx), %xmm6
1814 ; AVX-NEXT: vmovaps (%rdx), %xmm2
1815 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm6[0],xmm2[0]
1816 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm14[0,1,2,0]
1817 ; AVX-NEXT: vunpcklps {{.*#+}} xmm15 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
1818 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
1819 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm14[2,3],ymm0[4,5],ymm14[6,7]
1820 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1821 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1822 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1823 ; AVX-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm14[2],xmm1[3],xmm14[3]
1824 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm1[2],xmm14[2]
1825 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm1
1826 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1827 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1828 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
1829 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[3,0],xmm0[3,0]
1830 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
1831 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
1832 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm1[2,3],ymm14[4,5],ymm1[6,7]
1833 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1834 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1835 ; AVX-NEXT: vmovaps (%rsp), %xmm14 # 16-byte Reload
1836 ; AVX-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm14[2],xmm1[3],xmm14[3]
1837 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm1[2],xmm14[2]
1838 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm0
1839 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1840 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1841 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
1842 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm1[3,0],xmm15[3,0]
1843 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
1844 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
1845 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm0[2,3],ymm14[4,5],ymm0[6,7]
1846 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1847 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1848 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1849 ; AVX-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm14[2],xmm1[3],xmm14[3]
1850 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm1[2],xmm14[2]
1851 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm0
1852 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1853 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1854 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
1855 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm1[3,0],xmm15[3,0]
1856 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
1857 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
1858 ; AVX-NEXT: vblendps {{.*#+}} ymm15 = ymm14[0,1],ymm0[2,3],ymm14[4,5],ymm0[6,7]
1859 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1860 ; AVX-NEXT: vunpckhps {{.*#+}} xmm0 = xmm13[2],xmm1[2],xmm13[3],xmm1[3]
1861 ; AVX-NEXT: vinsertps {{.*#+}} xmm13 = zero,zero,xmm13[2],xmm1[2]
1862 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm13, %ymm0
1863 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1864 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1865 ; AVX-NEXT: vunpckhps {{.*#+}} xmm13 = xmm14[2],xmm1[2],xmm14[3],xmm1[3]
1866 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm1[3,0],xmm14[3,0]
1867 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm14[2,0,2,3]
1868 ; AVX-NEXT: vinsertf128 $1, %xmm14, %ymm13, %ymm13
1869 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm13[0,1],ymm0[2,3],ymm13[4,5],ymm0[6,7]
1870 ; AVX-NEXT: vunpckhps {{.*#+}} xmm13 = xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1871 ; AVX-NEXT: vinsertps {{.*#+}} xmm9 = zero,zero,xmm9[2],xmm10[2]
1872 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm9, %ymm9
1873 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1874 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1875 ; AVX-NEXT: vunpckhps {{.*#+}} xmm10 = xmm13[2],xmm1[2],xmm13[3],xmm1[3]
1876 ; AVX-NEXT: vshufps {{.*#+}} xmm13 = xmm1[3,0],xmm13[3,0]
1877 ; AVX-NEXT: vshufps {{.*#+}} xmm13 = xmm13[2,0,2,3]
1878 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm10, %ymm10
1879 ; AVX-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1],ymm9[2,3],ymm10[4,5],ymm9[6,7]
1880 ; AVX-NEXT: vunpckhps {{.*#+}} xmm10 = xmm5[2],xmm7[2],xmm5[3],xmm7[3]
1881 ; AVX-NEXT: vinsertps {{.*#+}} xmm5 = zero,zero,xmm5[2],xmm7[2]
1882 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm5, %ymm5
1883 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1884 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
1885 ; AVX-NEXT: vunpckhps {{.*#+}} xmm7 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
1886 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm1[3,0],xmm10[3,0]
1887 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm10[2,0,2,3]
1888 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm7, %ymm7
1889 ; AVX-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3],ymm7[4,5],ymm5[6,7]
1890 ; AVX-NEXT: vunpckhps {{.*#+}} xmm7 = xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1891 ; AVX-NEXT: vinsertps {{.*#+}} xmm3 = zero,zero,xmm3[2],xmm4[2]
1892 ; AVX-NEXT: vinsertf128 $1, %xmm7, %ymm3, %ymm3
1893 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1894 ; AVX-NEXT: vunpckhps {{.*#+}} xmm4 = xmm1[2],xmm12[2],xmm1[3],xmm12[3]
1895 ; AVX-NEXT: vshufps {{.*#+}} xmm7 = xmm12[3,0],xmm1[3,0]
1896 ; AVX-NEXT: vshufps {{.*#+}} xmm7 = xmm7[2,0,2,3]
1897 ; AVX-NEXT: vinsertf128 $1, %xmm7, %ymm4, %ymm4
1898 ; AVX-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,3],ymm4[4,5],ymm3[6,7]
1899 ; AVX-NEXT: vunpckhps {{.*#+}} xmm4 = xmm2[2],xmm6[2],xmm2[3],xmm6[3]
1900 ; AVX-NEXT: vinsertps {{.*#+}} xmm1 = zero,zero,xmm2[2],xmm6[2]
1901 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
1902 ; AVX-NEXT: vunpckhps {{.*#+}} xmm2 = xmm11[2],xmm8[2],xmm11[3],xmm8[3]
1903 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm8[3,0],xmm11[3,0]
1904 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm4[2,0,2,3]
1905 ; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
1906 ; AVX-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
1907 ; AVX-NEXT: vmovaps %ymm1, 32(%r8)
1908 ; AVX-NEXT: vmovaps %ymm3, 480(%r8)
1909 ; AVX-NEXT: vmovaps %ymm5, 416(%r8)
1910 ; AVX-NEXT: vmovaps %ymm9, 224(%r8)
1911 ; AVX-NEXT: vmovaps %ymm0, 160(%r8)
1912 ; AVX-NEXT: vmovaps %ymm15, 352(%r8)
1913 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1914 ; AVX-NEXT: vmovaps %ymm0, 288(%r8)
1915 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1916 ; AVX-NEXT: vmovaps %ymm0, 96(%r8)
1917 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1918 ; AVX-NEXT: vmovaps %ymm0, (%r8)
1919 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1920 ; AVX-NEXT: vmovaps %ymm0, 448(%r8)
1921 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1922 ; AVX-NEXT: vmovaps %ymm0, 384(%r8)
1923 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1924 ; AVX-NEXT: vmovaps %ymm0, 192(%r8)
1925 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1926 ; AVX-NEXT: vmovaps %ymm0, 128(%r8)
1927 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1928 ; AVX-NEXT: vmovaps %ymm0, 320(%r8)
1929 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1930 ; AVX-NEXT: vmovaps %ymm0, 256(%r8)
1931 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1932 ; AVX-NEXT: vmovaps %ymm0, 64(%r8)
1933 ; AVX-NEXT: addq $488, %rsp # imm = 0x1E8
1934 ; AVX-NEXT: vzeroupper
1937 ; AVX2-LABEL: store_i32_stride4_vf32:
1939 ; AVX2-NEXT: pushq %rax
1940 ; AVX2-NEXT: vmovaps 64(%rdi), %ymm2
1941 ; AVX2-NEXT: vmovaps (%rdi), %ymm6
1942 ; AVX2-NEXT: vmovaps 64(%rsi), %ymm4
1943 ; AVX2-NEXT: vmovaps (%rsi), %ymm7
1944 ; AVX2-NEXT: vmovaps (%rcx), %xmm11
1945 ; AVX2-NEXT: vmovaps 32(%rcx), %xmm5
1946 ; AVX2-NEXT: vmovaps 64(%rcx), %xmm1
1947 ; AVX2-NEXT: vmovaps (%rdx), %xmm12
1948 ; AVX2-NEXT: vmovaps 32(%rdx), %xmm8
1949 ; AVX2-NEXT: vmovaps 64(%rdx), %xmm3
1950 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm0 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
1951 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
1952 ; AVX2-NEXT: vmovaps 32(%rsi), %xmm9
1953 ; AVX2-NEXT: vmovaps 64(%rsi), %xmm10
1954 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm13
1955 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm14
1956 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm10[0],xmm14[1],xmm10[1]
1957 ; AVX2-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
1958 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1],ymm0[2,3],ymm15[4,5],ymm0[6,7]
1959 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1960 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
1961 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,2,1]
1962 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm3 = xmm14[2],xmm10[2],xmm14[3],xmm10[3]
1963 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,1,3]
1964 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
1965 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1966 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm3 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
1967 ; AVX2-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
1968 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm10 = xmm13[0],xmm9[0],xmm13[1],xmm9[1]
1969 ; AVX2-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
1970 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
1971 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1972 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm5[2],xmm8[3],xmm5[3]
1973 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
1974 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm8 = xmm13[2],xmm9[2],xmm13[3],xmm9[3]
1975 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,1,3]
1976 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1],ymm5[2,3],ymm8[4,5],ymm5[6,7]
1977 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1978 ; AVX2-NEXT: vmovaps 96(%rcx), %xmm9
1979 ; AVX2-NEXT: vmovaps 96(%rdx), %xmm10
1980 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
1981 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,0,2,1]
1982 ; AVX2-NEXT: vmovaps 96(%rsi), %xmm13
1983 ; AVX2-NEXT: vmovaps 96(%rdi), %xmm14
1984 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
1985 ; AVX2-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
1986 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm15[0,1],ymm8[2,3],ymm15[4,5],ymm8[6,7]
1987 ; AVX2-NEXT: vmovaps (%rsi), %xmm15
1988 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm9 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1989 ; AVX2-NEXT: vmovaps (%rdi), %xmm0
1990 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,0,2,1]
1991 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm10 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1992 ; AVX2-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
1993 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1],ymm9[2,3],ymm10[4,5],ymm9[6,7]
1994 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1995 ; AVX2-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,0,2,1]
1996 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm13 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
1997 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
1998 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1],ymm10[2,3],ymm13[4,5],ymm10[6,7]
1999 ; AVX2-NEXT: vmovaps (%rdx), %ymm13
2000 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm11 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
2001 ; AVX2-NEXT: vmovaps (%rcx), %ymm14
2002 ; AVX2-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,0,2,1]
2003 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2004 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
2005 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm0[0,1],ymm11[2,3],ymm0[4,5],ymm11[6,7]
2006 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[6],ymm14[6],ymm13[7],ymm14[7]
2007 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2008 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm12 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
2009 ; AVX2-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
2010 ; AVX2-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm0[2,3],ymm12[4,5],ymm0[6,7]
2011 ; AVX2-NEXT: vmovaps 64(%rdx), %ymm0
2012 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[4],ymm14[4],ymm13[5],ymm14[5]
2013 ; AVX2-NEXT: vmovaps 64(%rcx), %ymm14
2014 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
2015 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
2016 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,1,3,3]
2017 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1],ymm13[2,3],ymm6[4,5],ymm13[6,7]
2018 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm7 = ymm0[0],ymm14[0],ymm0[1],ymm14[1],ymm0[4],ymm14[4],ymm0[5],ymm14[5]
2019 ; AVX2-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,2,2,3]
2020 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
2021 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
2022 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm7[2,3],ymm13[4,5],ymm7[6,7]
2023 ; AVX2-NEXT: vmovaps 32(%rdi), %ymm13
2024 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm14[2],ymm0[3],ymm14[3],ymm0[6],ymm14[6],ymm0[7],ymm14[7]
2025 ; AVX2-NEXT: vmovaps 32(%rdx), %ymm14
2026 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
2027 ; AVX2-NEXT: vmovaps 32(%rcx), %ymm4
2028 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2029 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
2030 ; AVX2-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
2031 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm14[0],ymm4[0],ymm14[1],ymm4[1],ymm14[4],ymm4[4],ymm14[5],ymm4[5]
2032 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2033 ; AVX2-NEXT: vmovaps 32(%rsi), %ymm15
2034 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm1 = ymm13[0],ymm15[0],ymm13[1],ymm15[1],ymm13[4],ymm15[4],ymm13[5],ymm15[5]
2035 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
2036 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
2037 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm14[2],ymm4[2],ymm14[3],ymm4[3],ymm14[6],ymm4[6],ymm14[7],ymm4[7]
2038 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm15[2],ymm13[3],ymm15[3],ymm13[6],ymm15[6],ymm13[7],ymm15[7]
2039 ; AVX2-NEXT: vmovaps 96(%rdx), %ymm13
2040 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2041 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[2,1,3,3]
2042 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
2043 ; AVX2-NEXT: vmovaps 96(%rcx), %ymm4
2044 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm14 = ymm13[0],ymm4[0],ymm13[1],ymm4[1],ymm13[4],ymm4[4],ymm13[5],ymm4[5]
2045 ; AVX2-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
2046 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm15
2047 ; AVX2-NEXT: vmovaps 96(%rsi), %ymm0
2048 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm5 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
2049 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
2050 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1],ymm14[2,3],ymm5[4,5],ymm14[6,7]
2051 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm4[2],ymm13[3],ymm4[3],ymm13[6],ymm4[6],ymm13[7],ymm4[7]
2052 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
2053 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
2054 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
2055 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm4[2,3],ymm0[4,5],ymm4[6,7]
2056 ; AVX2-NEXT: vmovaps %ymm0, 480(%r8)
2057 ; AVX2-NEXT: vmovaps %ymm5, 448(%r8)
2058 ; AVX2-NEXT: vmovaps %ymm2, 224(%r8)
2059 ; AVX2-NEXT: vmovaps %ymm1, 192(%r8)
2060 ; AVX2-NEXT: vmovaps %ymm3, 352(%r8)
2061 ; AVX2-NEXT: vmovaps %ymm7, 320(%r8)
2062 ; AVX2-NEXT: vmovaps %ymm6, 64(%r8)
2063 ; AVX2-NEXT: vmovaps %ymm12, 96(%r8)
2064 ; AVX2-NEXT: vmovaps %ymm11, 32(%r8)
2065 ; AVX2-NEXT: vmovaps %ymm10, (%r8)
2066 ; AVX2-NEXT: vmovaps %ymm9, 416(%r8)
2067 ; AVX2-NEXT: vmovaps %ymm8, 384(%r8)
2068 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2069 ; AVX2-NEXT: vmovaps %ymm0, 160(%r8)
2070 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2071 ; AVX2-NEXT: vmovaps %ymm0, 128(%r8)
2072 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2073 ; AVX2-NEXT: vmovaps %ymm0, 288(%r8)
2074 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2075 ; AVX2-NEXT: vmovaps %ymm0, 256(%r8)
2076 ; AVX2-NEXT: popq %rax
2077 ; AVX2-NEXT: vzeroupper
2080 ; AVX2-FP-LABEL: store_i32_stride4_vf32:
2082 ; AVX2-FP-NEXT: pushq %rax
2083 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %ymm2
2084 ; AVX2-FP-NEXT: vmovaps (%rdi), %ymm6
2085 ; AVX2-FP-NEXT: vmovaps 64(%rsi), %ymm4
2086 ; AVX2-FP-NEXT: vmovaps (%rsi), %ymm7
2087 ; AVX2-FP-NEXT: vmovaps (%rcx), %xmm11
2088 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm5
2089 ; AVX2-FP-NEXT: vmovaps 64(%rcx), %xmm1
2090 ; AVX2-FP-NEXT: vmovaps (%rdx), %xmm12
2091 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm8
2092 ; AVX2-FP-NEXT: vmovaps 64(%rdx), %xmm3
2093 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm0 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2094 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
2095 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm9
2096 ; AVX2-FP-NEXT: vmovaps 64(%rsi), %xmm10
2097 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm13
2098 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm14
2099 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm10[0],xmm14[1],xmm10[1]
2100 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
2101 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1],ymm0[2,3],ymm15[4,5],ymm0[6,7]
2102 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2103 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2104 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,2,1]
2105 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm3 = xmm14[2],xmm10[2],xmm14[3],xmm10[3]
2106 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,1,3]
2107 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
2108 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2109 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm3 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
2110 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
2111 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm10 = xmm13[0],xmm9[0],xmm13[1],xmm9[1]
2112 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
2113 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
2114 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2115 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm5[2],xmm8[3],xmm5[3]
2116 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
2117 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm8 = xmm13[2],xmm9[2],xmm13[3],xmm9[3]
2118 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,1,3]
2119 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1],ymm5[2,3],ymm8[4,5],ymm5[6,7]
2120 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2121 ; AVX2-FP-NEXT: vmovaps 96(%rcx), %xmm9
2122 ; AVX2-FP-NEXT: vmovaps 96(%rdx), %xmm10
2123 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
2124 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,0,2,1]
2125 ; AVX2-FP-NEXT: vmovaps 96(%rsi), %xmm13
2126 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %xmm14
2127 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
2128 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
2129 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm15[0,1],ymm8[2,3],ymm15[4,5],ymm8[6,7]
2130 ; AVX2-FP-NEXT: vmovaps (%rsi), %xmm15
2131 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm9 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2132 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm0
2133 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,0,2,1]
2134 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm10 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2135 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
2136 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1],ymm9[2,3],ymm10[4,5],ymm9[6,7]
2137 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
2138 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,0,2,1]
2139 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm13 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
2140 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
2141 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1],ymm10[2,3],ymm13[4,5],ymm10[6,7]
2142 ; AVX2-FP-NEXT: vmovaps (%rdx), %ymm13
2143 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm11 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
2144 ; AVX2-FP-NEXT: vmovaps (%rcx), %ymm14
2145 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,0,2,1]
2146 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2147 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
2148 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm0[0,1],ymm11[2,3],ymm0[4,5],ymm11[6,7]
2149 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[6],ymm14[6],ymm13[7],ymm14[7]
2150 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2151 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm12 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
2152 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
2153 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm0[2,3],ymm12[4,5],ymm0[6,7]
2154 ; AVX2-FP-NEXT: vmovaps 64(%rdx), %ymm0
2155 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[4],ymm14[4],ymm13[5],ymm14[5]
2156 ; AVX2-FP-NEXT: vmovaps 64(%rcx), %ymm14
2157 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
2158 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
2159 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,1,3,3]
2160 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1],ymm13[2,3],ymm6[4,5],ymm13[6,7]
2161 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm7 = ymm0[0],ymm14[0],ymm0[1],ymm14[1],ymm0[4],ymm14[4],ymm0[5],ymm14[5]
2162 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,2,2,3]
2163 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
2164 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
2165 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm7[2,3],ymm13[4,5],ymm7[6,7]
2166 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm13
2167 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm14[2],ymm0[3],ymm14[3],ymm0[6],ymm14[6],ymm0[7],ymm14[7]
2168 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm14
2169 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
2170 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %ymm4
2171 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2172 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
2173 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
2174 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm14[0],ymm4[0],ymm14[1],ymm4[1],ymm14[4],ymm4[4],ymm14[5],ymm4[5]
2175 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2176 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm15
2177 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm1 = ymm13[0],ymm15[0],ymm13[1],ymm15[1],ymm13[4],ymm15[4],ymm13[5],ymm15[5]
2178 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
2179 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
2180 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm14[2],ymm4[2],ymm14[3],ymm4[3],ymm14[6],ymm4[6],ymm14[7],ymm4[7]
2181 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm15[2],ymm13[3],ymm15[3],ymm13[6],ymm15[6],ymm13[7],ymm15[7]
2182 ; AVX2-FP-NEXT: vmovaps 96(%rdx), %ymm13
2183 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2184 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[2,1,3,3]
2185 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
2186 ; AVX2-FP-NEXT: vmovaps 96(%rcx), %ymm4
2187 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm13[0],ymm4[0],ymm13[1],ymm4[1],ymm13[4],ymm4[4],ymm13[5],ymm4[5]
2188 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
2189 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm15
2190 ; AVX2-FP-NEXT: vmovaps 96(%rsi), %ymm0
2191 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
2192 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
2193 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1],ymm14[2,3],ymm5[4,5],ymm14[6,7]
2194 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm4[2],ymm13[3],ymm4[3],ymm13[6],ymm4[6],ymm13[7],ymm4[7]
2195 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
2196 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
2197 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
2198 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm4[2,3],ymm0[4,5],ymm4[6,7]
2199 ; AVX2-FP-NEXT: vmovaps %ymm0, 480(%r8)
2200 ; AVX2-FP-NEXT: vmovaps %ymm5, 448(%r8)
2201 ; AVX2-FP-NEXT: vmovaps %ymm2, 224(%r8)
2202 ; AVX2-FP-NEXT: vmovaps %ymm1, 192(%r8)
2203 ; AVX2-FP-NEXT: vmovaps %ymm3, 352(%r8)
2204 ; AVX2-FP-NEXT: vmovaps %ymm7, 320(%r8)
2205 ; AVX2-FP-NEXT: vmovaps %ymm6, 64(%r8)
2206 ; AVX2-FP-NEXT: vmovaps %ymm12, 96(%r8)
2207 ; AVX2-FP-NEXT: vmovaps %ymm11, 32(%r8)
2208 ; AVX2-FP-NEXT: vmovaps %ymm10, (%r8)
2209 ; AVX2-FP-NEXT: vmovaps %ymm9, 416(%r8)
2210 ; AVX2-FP-NEXT: vmovaps %ymm8, 384(%r8)
2211 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2212 ; AVX2-FP-NEXT: vmovaps %ymm0, 160(%r8)
2213 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2214 ; AVX2-FP-NEXT: vmovaps %ymm0, 128(%r8)
2215 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2216 ; AVX2-FP-NEXT: vmovaps %ymm0, 288(%r8)
2217 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2218 ; AVX2-FP-NEXT: vmovaps %ymm0, 256(%r8)
2219 ; AVX2-FP-NEXT: popq %rax
2220 ; AVX2-FP-NEXT: vzeroupper
2221 ; AVX2-FP-NEXT: retq
2223 ; AVX2-FCP-LABEL: store_i32_stride4_vf32:
2224 ; AVX2-FCP: # %bb.0:
2225 ; AVX2-FCP-NEXT: pushq %rax
2226 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %ymm2
2227 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm6
2228 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %ymm4
2229 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm7
2230 ; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm11
2231 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm5
2232 ; AVX2-FCP-NEXT: vmovaps 64(%rcx), %xmm1
2233 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm12
2234 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm8
2235 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %xmm3
2236 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm0 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2237 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
2238 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm9
2239 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %xmm10
2240 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm13
2241 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm14
2242 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm10[0],xmm14[1],xmm10[1]
2243 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
2244 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0,1],ymm0[2,3],ymm15[4,5],ymm0[6,7]
2245 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2246 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2247 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,2,1]
2248 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm3 = xmm14[2],xmm10[2],xmm14[3],xmm10[3]
2249 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,1,3]
2250 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
2251 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2252 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm3 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
2253 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,0,2,1]
2254 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm10 = xmm13[0],xmm9[0],xmm13[1],xmm9[1]
2255 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
2256 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
2257 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2258 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm8[2],xmm5[2],xmm8[3],xmm5[3]
2259 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,0,2,1]
2260 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm8 = xmm13[2],xmm9[2],xmm13[3],xmm9[3]
2261 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,1,1,3]
2262 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1],ymm5[2,3],ymm8[4,5],ymm5[6,7]
2263 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2264 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %xmm9
2265 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %xmm10
2266 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
2267 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm8[0,0,2,1]
2268 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %xmm13
2269 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %xmm14
2270 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
2271 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[0,1,1,3]
2272 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm15[0,1],ymm8[2,3],ymm15[4,5],ymm8[6,7]
2273 ; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm15
2274 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm9 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2275 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm0
2276 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm9 = ymm9[0,0,2,1]
2277 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm10 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2278 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,1,1,3]
2279 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1],ymm9[2,3],ymm10[4,5],ymm9[6,7]
2280 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
2281 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm10 = ymm10[0,0,2,1]
2282 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm13 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
2283 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
2284 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1],ymm10[2,3],ymm13[4,5],ymm10[6,7]
2285 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm13
2286 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm11 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
2287 ; AVX2-FCP-NEXT: vmovaps (%rcx), %ymm14
2288 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm11 = ymm11[0,0,2,1]
2289 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2290 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
2291 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm0[0,1],ymm11[2,3],ymm0[4,5],ymm11[6,7]
2292 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[6],ymm14[6],ymm13[7],ymm14[7]
2293 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2294 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm12 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
2295 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
2296 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm0[2,3],ymm12[4,5],ymm0[6,7]
2297 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %ymm0
2298 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[4],ymm14[4],ymm13[5],ymm14[5]
2299 ; AVX2-FCP-NEXT: vmovaps 64(%rcx), %ymm14
2300 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
2301 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
2302 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,1,3,3]
2303 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1],ymm13[2,3],ymm6[4,5],ymm13[6,7]
2304 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm7 = ymm0[0],ymm14[0],ymm0[1],ymm14[1],ymm0[4],ymm14[4],ymm0[5],ymm14[5]
2305 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm7 = ymm7[0,2,2,3]
2306 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
2307 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
2308 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm7[2,3],ymm13[4,5],ymm7[6,7]
2309 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm13
2310 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm14[2],ymm0[3],ymm14[3],ymm0[6],ymm14[6],ymm0[7],ymm14[7]
2311 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm14
2312 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
2313 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %ymm4
2314 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
2315 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,1,3,3]
2316 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm3 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
2317 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm14[0],ymm4[0],ymm14[1],ymm4[1],ymm14[4],ymm4[4],ymm14[5],ymm4[5]
2318 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2319 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm15
2320 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm1 = ymm13[0],ymm15[0],ymm13[1],ymm15[1],ymm13[4],ymm15[4],ymm13[5],ymm15[5]
2321 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
2322 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
2323 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm14[2],ymm4[2],ymm14[3],ymm4[3],ymm14[6],ymm4[6],ymm14[7],ymm4[7]
2324 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm15[2],ymm13[3],ymm15[3],ymm13[6],ymm15[6],ymm13[7],ymm15[7]
2325 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %ymm13
2326 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
2327 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[2,1,3,3]
2328 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
2329 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %ymm4
2330 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm13[0],ymm4[0],ymm13[1],ymm4[1],ymm13[4],ymm4[4],ymm13[5],ymm4[5]
2331 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
2332 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm15
2333 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %ymm0
2334 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
2335 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
2336 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1],ymm14[2,3],ymm5[4,5],ymm14[6,7]
2337 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm4 = ymm13[2],ymm4[2],ymm13[3],ymm4[3],ymm13[6],ymm4[6],ymm13[7],ymm4[7]
2338 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
2339 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,2,2,3]
2340 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
2341 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm4[2,3],ymm0[4,5],ymm4[6,7]
2342 ; AVX2-FCP-NEXT: vmovaps %ymm0, 480(%r8)
2343 ; AVX2-FCP-NEXT: vmovaps %ymm5, 448(%r8)
2344 ; AVX2-FCP-NEXT: vmovaps %ymm2, 224(%r8)
2345 ; AVX2-FCP-NEXT: vmovaps %ymm1, 192(%r8)
2346 ; AVX2-FCP-NEXT: vmovaps %ymm3, 352(%r8)
2347 ; AVX2-FCP-NEXT: vmovaps %ymm7, 320(%r8)
2348 ; AVX2-FCP-NEXT: vmovaps %ymm6, 64(%r8)
2349 ; AVX2-FCP-NEXT: vmovaps %ymm12, 96(%r8)
2350 ; AVX2-FCP-NEXT: vmovaps %ymm11, 32(%r8)
2351 ; AVX2-FCP-NEXT: vmovaps %ymm10, (%r8)
2352 ; AVX2-FCP-NEXT: vmovaps %ymm9, 416(%r8)
2353 ; AVX2-FCP-NEXT: vmovaps %ymm8, 384(%r8)
2354 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2355 ; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%r8)
2356 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2357 ; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%r8)
2358 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2359 ; AVX2-FCP-NEXT: vmovaps %ymm0, 288(%r8)
2360 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2361 ; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%r8)
2362 ; AVX2-FCP-NEXT: popq %rax
2363 ; AVX2-FCP-NEXT: vzeroupper
2364 ; AVX2-FCP-NEXT: retq
2366 ; AVX512-LABEL: store_i32_stride4_vf32:
2368 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
2369 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
2370 ; AVX512-NEXT: vmovdqa64 (%rsi), %zmm2
2371 ; AVX512-NEXT: vmovdqa64 64(%rsi), %zmm3
2372 ; AVX512-NEXT: vmovdqa64 (%rdx), %zmm4
2373 ; AVX512-NEXT: vmovdqa64 64(%rdx), %zmm5
2374 ; AVX512-NEXT: vmovdqa64 (%rcx), %zmm6
2375 ; AVX512-NEXT: vmovdqa64 64(%rcx), %zmm7
2376 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2377 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm9
2378 ; AVX512-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2379 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2380 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm11
2381 ; AVX512-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2382 ; AVX512-NEXT: movb $-86, %al
2383 ; AVX512-NEXT: kmovw %eax, %k1
2384 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2385 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2386 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm12
2387 ; AVX512-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2388 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2389 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm14
2390 ; AVX512-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2391 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2392 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2393 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm15
2394 ; AVX512-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2395 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2396 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm17
2397 ; AVX512-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2398 ; AVX512-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2399 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2400 ; AVX512-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2401 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2402 ; AVX512-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2403 ; AVX512-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2404 ; AVX512-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2405 ; AVX512-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2406 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2407 ; AVX512-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2408 ; AVX512-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2409 ; AVX512-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2410 ; AVX512-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2411 ; AVX512-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2412 ; AVX512-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2413 ; AVX512-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2414 ; AVX512-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2415 ; AVX512-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2416 ; AVX512-NEXT: vmovdqa64 %zmm1, 384(%r8)
2417 ; AVX512-NEXT: vmovdqa64 %zmm16, 448(%r8)
2418 ; AVX512-NEXT: vmovdqa64 %zmm13, 256(%r8)
2419 ; AVX512-NEXT: vmovdqa64 %zmm10, 320(%r8)
2420 ; AVX512-NEXT: vmovdqa64 %zmm0, 128(%r8)
2421 ; AVX512-NEXT: vmovdqa64 %zmm17, 192(%r8)
2422 ; AVX512-NEXT: vmovdqa64 %zmm14, (%r8)
2423 ; AVX512-NEXT: vmovdqa64 %zmm11, 64(%r8)
2424 ; AVX512-NEXT: vzeroupper
2427 ; AVX512-FCP-LABEL: store_i32_stride4_vf32:
2428 ; AVX512-FCP: # %bb.0:
2429 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2430 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2431 ; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm2
2432 ; AVX512-FCP-NEXT: vmovdqa64 64(%rsi), %zmm3
2433 ; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm4
2434 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
2435 ; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
2436 ; AVX512-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
2437 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2438 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm9
2439 ; AVX512-FCP-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2440 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2441 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm11
2442 ; AVX512-FCP-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2443 ; AVX512-FCP-NEXT: movb $-86, %al
2444 ; AVX512-FCP-NEXT: kmovw %eax, %k1
2445 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2446 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2447 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm12
2448 ; AVX512-FCP-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2449 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2450 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm14
2451 ; AVX512-FCP-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2452 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2453 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2454 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
2455 ; AVX512-FCP-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2456 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2457 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
2458 ; AVX512-FCP-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2459 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2460 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2461 ; AVX512-FCP-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2462 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2463 ; AVX512-FCP-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2464 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2465 ; AVX512-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2466 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2467 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2468 ; AVX512-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2469 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2470 ; AVX512-FCP-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2471 ; AVX512-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2472 ; AVX512-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2473 ; AVX512-FCP-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2474 ; AVX512-FCP-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2475 ; AVX512-FCP-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2476 ; AVX512-FCP-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2477 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
2478 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, 448(%r8)
2479 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, 256(%r8)
2480 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 320(%r8)
2481 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
2482 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 192(%r8)
2483 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, (%r8)
2484 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 64(%r8)
2485 ; AVX512-FCP-NEXT: vzeroupper
2486 ; AVX512-FCP-NEXT: retq
2488 ; AVX512DQ-LABEL: store_i32_stride4_vf32:
2489 ; AVX512DQ: # %bb.0:
2490 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm0
2491 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
2492 ; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm2
2493 ; AVX512DQ-NEXT: vmovdqa64 64(%rsi), %zmm3
2494 ; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm4
2495 ; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %zmm5
2496 ; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm6
2497 ; AVX512DQ-NEXT: vmovdqa64 64(%rcx), %zmm7
2498 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2499 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm9
2500 ; AVX512DQ-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2501 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2502 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm11
2503 ; AVX512DQ-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2504 ; AVX512DQ-NEXT: movb $-86, %al
2505 ; AVX512DQ-NEXT: kmovw %eax, %k1
2506 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2507 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2508 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm12
2509 ; AVX512DQ-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2510 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2511 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm14
2512 ; AVX512DQ-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2513 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2514 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2515 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm15
2516 ; AVX512DQ-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2517 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2518 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm17
2519 ; AVX512DQ-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2520 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2521 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2522 ; AVX512DQ-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2523 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2524 ; AVX512DQ-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2525 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2526 ; AVX512DQ-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2527 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2528 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2529 ; AVX512DQ-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2530 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2531 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2532 ; AVX512DQ-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2533 ; AVX512DQ-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2534 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2535 ; AVX512DQ-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2536 ; AVX512DQ-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2537 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2538 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 384(%r8)
2539 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 448(%r8)
2540 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 256(%r8)
2541 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 320(%r8)
2542 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%r8)
2543 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 192(%r8)
2544 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, (%r8)
2545 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 64(%r8)
2546 ; AVX512DQ-NEXT: vzeroupper
2547 ; AVX512DQ-NEXT: retq
2549 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf32:
2550 ; AVX512DQ-FCP: # %bb.0:
2551 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2552 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2553 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm2
2554 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rsi), %zmm3
2555 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm4
2556 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
2557 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
2558 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
2559 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2560 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm9
2561 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2562 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2563 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm11
2564 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2565 ; AVX512DQ-FCP-NEXT: movb $-86, %al
2566 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
2567 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2568 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2569 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm12
2570 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2571 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2572 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm14
2573 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2574 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2575 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2576 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
2577 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2578 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2579 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
2580 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2581 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2582 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2583 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2584 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2585 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2586 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2587 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2588 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2589 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2590 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2591 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2592 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2593 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2594 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2595 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2596 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2597 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2598 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2599 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
2600 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, 448(%r8)
2601 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, 256(%r8)
2602 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 320(%r8)
2603 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
2604 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 192(%r8)
2605 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, (%r8)
2606 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 64(%r8)
2607 ; AVX512DQ-FCP-NEXT: vzeroupper
2608 ; AVX512DQ-FCP-NEXT: retq
2610 ; AVX512BW-LABEL: store_i32_stride4_vf32:
2611 ; AVX512BW: # %bb.0:
2612 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
2613 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
2614 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm2
2615 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm3
2616 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm4
2617 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm5
2618 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm6
2619 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm7
2620 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2621 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm9
2622 ; AVX512BW-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2623 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2624 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11
2625 ; AVX512BW-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2626 ; AVX512BW-NEXT: movb $-86, %al
2627 ; AVX512BW-NEXT: kmovd %eax, %k1
2628 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2629 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2630 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm12
2631 ; AVX512BW-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2632 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2633 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14
2634 ; AVX512BW-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2635 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2636 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2637 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm15
2638 ; AVX512BW-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2639 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2640 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm17
2641 ; AVX512BW-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2642 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2643 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2644 ; AVX512BW-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2645 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2646 ; AVX512BW-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2647 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2648 ; AVX512BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2649 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2650 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2651 ; AVX512BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2652 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2653 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2654 ; AVX512BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2655 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2656 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2657 ; AVX512BW-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2658 ; AVX512BW-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2659 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2660 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 384(%r8)
2661 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 448(%r8)
2662 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 256(%r8)
2663 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 320(%r8)
2664 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%r8)
2665 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 192(%r8)
2666 ; AVX512BW-NEXT: vmovdqa64 %zmm14, (%r8)
2667 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 64(%r8)
2668 ; AVX512BW-NEXT: vzeroupper
2669 ; AVX512BW-NEXT: retq
2671 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf32:
2672 ; AVX512BW-FCP: # %bb.0:
2673 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2674 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2675 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm2
2676 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm3
2677 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm4
2678 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
2679 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
2680 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
2681 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2682 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm9
2683 ; AVX512BW-FCP-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2684 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2685 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11
2686 ; AVX512BW-FCP-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2687 ; AVX512BW-FCP-NEXT: movb $-86, %al
2688 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
2689 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2690 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2691 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm12
2692 ; AVX512BW-FCP-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2693 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2694 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14
2695 ; AVX512BW-FCP-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2696 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2697 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2698 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
2699 ; AVX512BW-FCP-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2700 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2701 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
2702 ; AVX512BW-FCP-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2703 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2704 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2705 ; AVX512BW-FCP-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2706 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2707 ; AVX512BW-FCP-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2708 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2709 ; AVX512BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2710 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2711 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2712 ; AVX512BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2713 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2714 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2715 ; AVX512BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2716 ; AVX512BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2717 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2718 ; AVX512BW-FCP-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2719 ; AVX512BW-FCP-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2720 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2721 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
2722 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, 448(%r8)
2723 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, 256(%r8)
2724 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 320(%r8)
2725 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
2726 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%r8)
2727 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, (%r8)
2728 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 64(%r8)
2729 ; AVX512BW-FCP-NEXT: vzeroupper
2730 ; AVX512BW-FCP-NEXT: retq
2732 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf32:
2733 ; AVX512DQ-BW: # %bb.0:
2734 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
2735 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
2736 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm2
2737 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rsi), %zmm3
2738 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm4
2739 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %zmm5
2740 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm6
2741 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rcx), %zmm7
2742 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2743 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm9
2744 ; AVX512DQ-BW-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2745 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2746 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm11
2747 ; AVX512DQ-BW-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2748 ; AVX512DQ-BW-NEXT: movb $-86, %al
2749 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
2750 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2751 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2752 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm12
2753 ; AVX512DQ-BW-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2754 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2755 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm14
2756 ; AVX512DQ-BW-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2757 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2758 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2759 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm15
2760 ; AVX512DQ-BW-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2761 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2762 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm17
2763 ; AVX512DQ-BW-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2764 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2765 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2766 ; AVX512DQ-BW-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2767 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2768 ; AVX512DQ-BW-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2769 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2770 ; AVX512DQ-BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2771 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2772 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2773 ; AVX512DQ-BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2774 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2775 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2776 ; AVX512DQ-BW-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2777 ; AVX512DQ-BW-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2778 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2779 ; AVX512DQ-BW-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2780 ; AVX512DQ-BW-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2781 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2782 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 384(%r8)
2783 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 448(%r8)
2784 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, 256(%r8)
2785 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 320(%r8)
2786 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 128(%r8)
2787 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 192(%r8)
2788 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, (%r8)
2789 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 64(%r8)
2790 ; AVX512DQ-BW-NEXT: vzeroupper
2791 ; AVX512DQ-BW-NEXT: retq
2793 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf32:
2794 ; AVX512DQ-BW-FCP: # %bb.0:
2795 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2796 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2797 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm2
2798 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm3
2799 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm4
2800 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm5
2801 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm6
2802 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm7
2803 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm8 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
2804 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm9
2805 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm6, %zmm8, %zmm9
2806 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm10 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
2807 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm11
2808 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm2, %zmm10, %zmm11
2809 ; AVX512DQ-BW-FCP-NEXT: movb $-86, %al
2810 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
2811 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm11 {%k1}
2812 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm9 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
2813 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm12
2814 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm6, %zmm9, %zmm12
2815 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm13 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
2816 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm14
2817 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm2, %zmm13, %zmm14
2818 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm14 {%k1}
2819 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm12 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
2820 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm15
2821 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm6, %zmm12, %zmm15
2822 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
2823 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm17
2824 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm2, %zmm16, %zmm17
2825 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, %zmm17 {%k1}
2826 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
2827 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm6, %zmm15, %zmm4
2828 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm6 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
2829 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm2, %zmm6, %zmm0
2830 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, %zmm0 {%k1}
2831 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm8
2832 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm10
2833 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm10 {%k1}
2834 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm9
2835 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm13
2836 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm13 {%k1}
2837 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm7, %zmm5, %zmm12
2838 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm3, %zmm1, %zmm16
2839 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
2840 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm7, %zmm15, %zmm5
2841 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm3, %zmm6, %zmm1
2842 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, %zmm1 {%k1}
2843 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
2844 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, 448(%r8)
2845 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, 256(%r8)
2846 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 320(%r8)
2847 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
2848 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 192(%r8)
2849 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, (%r8)
2850 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 64(%r8)
2851 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
2852 ; AVX512DQ-BW-FCP-NEXT: retq
2853 %in.vec0 = load <32 x i32>, ptr %in.vecptr0, align 64
2854 %in.vec1 = load <32 x i32>, ptr %in.vecptr1, align 64
2855 %in.vec2 = load <32 x i32>, ptr %in.vecptr2, align 64
2856 %in.vec3 = load <32 x i32>, ptr %in.vecptr3, align 64
2857 %1 = shufflevector <32 x i32> %in.vec0, <32 x i32> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2858 %2 = shufflevector <32 x i32> %in.vec2, <32 x i32> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2859 %3 = shufflevector <64 x i32> %1, <64 x i32> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
2860 %interleaved.vec = shufflevector <128 x i32> %3, <128 x i32> poison, <128 x i32> <i32 0, i32 32, i32 64, i32 96, i32 1, i32 33, i32 65, i32 97, i32 2, i32 34, i32 66, i32 98, i32 3, i32 35, i32 67, i32 99, i32 4, i32 36, i32 68, i32 100, i32 5, i32 37, i32 69, i32 101, i32 6, i32 38, i32 70, i32 102, i32 7, i32 39, i32 71, i32 103, i32 8, i32 40, i32 72, i32 104, i32 9, i32 41, i32 73, i32 105, i32 10, i32 42, i32 74, i32 106, i32 11, i32 43, i32 75, i32 107, i32 12, i32 44, i32 76, i32 108, i32 13, i32 45, i32 77, i32 109, i32 14, i32 46, i32 78, i32 110, i32 15, i32 47, i32 79, i32 111, i32 16, i32 48, i32 80, i32 112, i32 17, i32 49, i32 81, i32 113, i32 18, i32 50, i32 82, i32 114, i32 19, i32 51, i32 83, i32 115, i32 20, i32 52, i32 84, i32 116, i32 21, i32 53, i32 85, i32 117, i32 22, i32 54, i32 86, i32 118, i32 23, i32 55, i32 87, i32 119, i32 24, i32 56, i32 88, i32 120, i32 25, i32 57, i32 89, i32 121, i32 26, i32 58, i32 90, i32 122, i32 27, i32 59, i32 91, i32 123, i32 28, i32 60, i32 92, i32 124, i32 29, i32 61, i32 93, i32 125, i32 30, i32 62, i32 94, i32 126, i32 31, i32 63, i32 95, i32 127>
2861 store <128 x i32> %interleaved.vec, ptr %out.vec, align 64
2865 define void @store_i32_stride4_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
2866 ; SSE-LABEL: store_i32_stride4_vf64:
2868 ; SSE-NEXT: subq $696, %rsp # imm = 0x2B8
2869 ; SSE-NEXT: movaps (%rdi), %xmm10
2870 ; SSE-NEXT: movaps 16(%rdi), %xmm11
2871 ; SSE-NEXT: movaps 32(%rdi), %xmm12
2872 ; SSE-NEXT: movaps 48(%rdi), %xmm13
2873 ; SSE-NEXT: movaps (%rsi), %xmm4
2874 ; SSE-NEXT: movaps 16(%rsi), %xmm2
2875 ; SSE-NEXT: movaps 32(%rsi), %xmm0
2876 ; SSE-NEXT: movaps (%rdx), %xmm6
2877 ; SSE-NEXT: movaps 16(%rdx), %xmm3
2878 ; SSE-NEXT: movaps 32(%rdx), %xmm1
2879 ; SSE-NEXT: movaps (%rcx), %xmm7
2880 ; SSE-NEXT: movaps 16(%rcx), %xmm8
2881 ; SSE-NEXT: movaps 32(%rcx), %xmm5
2882 ; SSE-NEXT: movaps %xmm6, %xmm9
2883 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm7[0],xmm9[1],xmm7[1]
2884 ; SSE-NEXT: movaps %xmm10, %xmm14
2885 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm4[0],xmm14[1],xmm4[1]
2886 ; SSE-NEXT: movaps %xmm14, %xmm15
2887 ; SSE-NEXT: movlhps {{.*#+}} xmm15 = xmm15[0],xmm9[0]
2888 ; SSE-NEXT: movaps %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2889 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm9[1]
2890 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2891 ; SSE-NEXT: unpckhps {{.*#+}} xmm6 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2892 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm4[2],xmm10[3],xmm4[3]
2893 ; SSE-NEXT: movaps %xmm10, %xmm4
2894 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm6[0]
2895 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2896 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1],xmm6[1]
2897 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2898 ; SSE-NEXT: movaps %xmm3, %xmm4
2899 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm8[0],xmm4[1],xmm8[1]
2900 ; SSE-NEXT: movaps %xmm11, %xmm6
2901 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1]
2902 ; SSE-NEXT: movaps %xmm6, %xmm7
2903 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm4[0]
2904 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2905 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1],xmm4[1]
2906 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2907 ; SSE-NEXT: unpckhps {{.*#+}} xmm3 = xmm3[2],xmm8[2],xmm3[3],xmm8[3]
2908 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm2[2],xmm11[3],xmm2[3]
2909 ; SSE-NEXT: movaps %xmm11, %xmm2
2910 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
2911 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2912 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm3[1]
2913 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2914 ; SSE-NEXT: movaps %xmm1, %xmm2
2915 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
2916 ; SSE-NEXT: movaps %xmm12, %xmm3
2917 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
2918 ; SSE-NEXT: movaps %xmm3, %xmm4
2919 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
2920 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2921 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm2[1]
2922 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2923 ; SSE-NEXT: movaps 48(%rdx), %xmm2
2924 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
2925 ; SSE-NEXT: movaps 48(%rcx), %xmm3
2926 ; SSE-NEXT: unpckhps {{.*#+}} xmm12 = xmm12[2],xmm0[2],xmm12[3],xmm0[3]
2927 ; SSE-NEXT: movaps %xmm12, %xmm0
2928 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
2929 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2930 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm1[1]
2931 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2932 ; SSE-NEXT: movaps %xmm2, %xmm0
2933 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
2934 ; SSE-NEXT: movaps 48(%rsi), %xmm1
2935 ; SSE-NEXT: movaps %xmm13, %xmm4
2936 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
2937 ; SSE-NEXT: movaps %xmm4, %xmm5
2938 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm0[0]
2939 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2940 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
2941 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2942 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
2943 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm1[2],xmm13[3],xmm1[3]
2944 ; SSE-NEXT: movaps %xmm13, %xmm0
2945 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
2946 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2947 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm2[1]
2948 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2949 ; SSE-NEXT: movaps 64(%rdx), %xmm0
2950 ; SSE-NEXT: movaps 64(%rcx), %xmm1
2951 ; SSE-NEXT: movaps %xmm0, %xmm2
2952 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2953 ; SSE-NEXT: movaps 64(%rdi), %xmm5
2954 ; SSE-NEXT: movaps 64(%rsi), %xmm3
2955 ; SSE-NEXT: movaps %xmm5, %xmm4
2956 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
2957 ; SSE-NEXT: movaps %xmm4, %xmm6
2958 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
2959 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2960 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
2961 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2962 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
2963 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
2964 ; SSE-NEXT: movaps %xmm5, %xmm1
2965 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2966 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2967 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
2968 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2969 ; SSE-NEXT: movaps 80(%rdx), %xmm0
2970 ; SSE-NEXT: movaps 80(%rcx), %xmm1
2971 ; SSE-NEXT: movaps %xmm0, %xmm2
2972 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2973 ; SSE-NEXT: movaps 80(%rdi), %xmm5
2974 ; SSE-NEXT: movaps 80(%rsi), %xmm3
2975 ; SSE-NEXT: movaps %xmm5, %xmm4
2976 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
2977 ; SSE-NEXT: movaps %xmm4, %xmm6
2978 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
2979 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2980 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
2981 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2982 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
2983 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
2984 ; SSE-NEXT: movaps %xmm5, %xmm1
2985 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
2986 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2987 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
2988 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2989 ; SSE-NEXT: movaps 96(%rdx), %xmm0
2990 ; SSE-NEXT: movaps 96(%rcx), %xmm1
2991 ; SSE-NEXT: movaps %xmm0, %xmm2
2992 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2993 ; SSE-NEXT: movaps 96(%rdi), %xmm5
2994 ; SSE-NEXT: movaps 96(%rsi), %xmm3
2995 ; SSE-NEXT: movaps %xmm5, %xmm4
2996 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
2997 ; SSE-NEXT: movaps %xmm4, %xmm6
2998 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
2999 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3000 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3001 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3002 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3003 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3004 ; SSE-NEXT: movaps %xmm5, %xmm1
3005 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3006 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3007 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
3008 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3009 ; SSE-NEXT: movaps 112(%rdx), %xmm0
3010 ; SSE-NEXT: movaps 112(%rcx), %xmm1
3011 ; SSE-NEXT: movaps %xmm0, %xmm2
3012 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3013 ; SSE-NEXT: movaps 112(%rdi), %xmm5
3014 ; SSE-NEXT: movaps 112(%rsi), %xmm3
3015 ; SSE-NEXT: movaps %xmm5, %xmm4
3016 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3017 ; SSE-NEXT: movaps %xmm4, %xmm6
3018 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
3019 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3020 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3021 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3022 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3023 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3024 ; SSE-NEXT: movaps %xmm5, %xmm1
3025 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3026 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3027 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
3028 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3029 ; SSE-NEXT: movaps 128(%rdx), %xmm0
3030 ; SSE-NEXT: movaps 128(%rcx), %xmm1
3031 ; SSE-NEXT: movaps %xmm0, %xmm2
3032 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3033 ; SSE-NEXT: movaps 128(%rdi), %xmm5
3034 ; SSE-NEXT: movaps 128(%rsi), %xmm3
3035 ; SSE-NEXT: movaps %xmm5, %xmm4
3036 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3037 ; SSE-NEXT: movaps %xmm4, %xmm6
3038 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
3039 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3040 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3041 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3042 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3043 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3044 ; SSE-NEXT: movaps %xmm5, %xmm1
3045 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3046 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3047 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
3048 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3049 ; SSE-NEXT: movaps 144(%rdx), %xmm0
3050 ; SSE-NEXT: movaps 144(%rcx), %xmm1
3051 ; SSE-NEXT: movaps %xmm0, %xmm2
3052 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3053 ; SSE-NEXT: movaps 144(%rdi), %xmm5
3054 ; SSE-NEXT: movaps 144(%rsi), %xmm3
3055 ; SSE-NEXT: movaps %xmm5, %xmm4
3056 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3057 ; SSE-NEXT: movaps %xmm4, %xmm6
3058 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
3059 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3060 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3061 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3062 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3063 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3064 ; SSE-NEXT: movaps %xmm5, %xmm1
3065 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3066 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3067 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
3068 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3069 ; SSE-NEXT: movaps 160(%rdx), %xmm0
3070 ; SSE-NEXT: movaps 160(%rcx), %xmm1
3071 ; SSE-NEXT: movaps %xmm0, %xmm2
3072 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3073 ; SSE-NEXT: movaps 160(%rdi), %xmm5
3074 ; SSE-NEXT: movaps 160(%rsi), %xmm3
3075 ; SSE-NEXT: movaps %xmm5, %xmm4
3076 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3077 ; SSE-NEXT: movaps %xmm4, %xmm6
3078 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
3079 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3080 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3081 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3082 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3083 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3084 ; SSE-NEXT: movaps %xmm5, %xmm1
3085 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3086 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
3087 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm0[1]
3088 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3089 ; SSE-NEXT: movaps 176(%rdx), %xmm0
3090 ; SSE-NEXT: movaps 176(%rcx), %xmm1
3091 ; SSE-NEXT: movaps %xmm0, %xmm2
3092 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3093 ; SSE-NEXT: movaps 176(%rdi), %xmm15
3094 ; SSE-NEXT: movaps 176(%rsi), %xmm3
3095 ; SSE-NEXT: movaps %xmm15, %xmm4
3096 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3097 ; SSE-NEXT: movaps %xmm4, %xmm5
3098 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
3099 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3100 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
3101 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3102 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3103 ; SSE-NEXT: unpckhps {{.*#+}} xmm15 = xmm15[2],xmm3[2],xmm15[3],xmm3[3]
3104 ; SSE-NEXT: movaps %xmm15, %xmm1
3105 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3106 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3107 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1],xmm0[1]
3108 ; SSE-NEXT: movaps 192(%rdx), %xmm0
3109 ; SSE-NEXT: movaps 192(%rcx), %xmm1
3110 ; SSE-NEXT: movaps %xmm0, %xmm2
3111 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3112 ; SSE-NEXT: movaps 192(%rdi), %xmm12
3113 ; SSE-NEXT: movaps 192(%rsi), %xmm3
3114 ; SSE-NEXT: movaps %xmm12, %xmm14
3115 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm3[0],xmm14[1],xmm3[1]
3116 ; SSE-NEXT: movaps %xmm14, %xmm4
3117 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
3118 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3119 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1],xmm2[1]
3120 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3121 ; SSE-NEXT: unpckhps {{.*#+}} xmm12 = xmm12[2],xmm3[2],xmm12[3],xmm3[3]
3122 ; SSE-NEXT: movaps %xmm12, %xmm1
3123 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3124 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3125 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm0[1]
3126 ; SSE-NEXT: movaps 208(%rdx), %xmm0
3127 ; SSE-NEXT: movaps 208(%rcx), %xmm1
3128 ; SSE-NEXT: movaps %xmm0, %xmm2
3129 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3130 ; SSE-NEXT: movaps 208(%rdi), %xmm13
3131 ; SSE-NEXT: movaps 208(%rsi), %xmm7
3132 ; SSE-NEXT: movaps %xmm13, %xmm8
3133 ; SSE-NEXT: unpcklps {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
3134 ; SSE-NEXT: movaps %xmm8, %xmm3
3135 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm2[0]
3136 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3137 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm2[1]
3138 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3139 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm7[2],xmm13[3],xmm7[3]
3140 ; SSE-NEXT: movaps %xmm13, %xmm1
3141 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm0[0]
3142 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3143 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1],xmm0[1]
3144 ; SSE-NEXT: movaps 224(%rdx), %xmm1
3145 ; SSE-NEXT: movaps 224(%rcx), %xmm6
3146 ; SSE-NEXT: movaps %xmm1, %xmm0
3147 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
3148 ; SSE-NEXT: movaps 224(%rdi), %xmm5
3149 ; SSE-NEXT: movaps 224(%rsi), %xmm4
3150 ; SSE-NEXT: movaps %xmm5, %xmm9
3151 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm4[0],xmm9[1],xmm4[1]
3152 ; SSE-NEXT: movaps %xmm9, %xmm11
3153 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
3154 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1],xmm0[1]
3155 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
3156 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3157 ; SSE-NEXT: movaps %xmm5, %xmm10
3158 ; SSE-NEXT: movlhps {{.*#+}} xmm10 = xmm10[0],xmm1[0]
3159 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm1[1]
3160 ; SSE-NEXT: movaps 240(%rdx), %xmm2
3161 ; SSE-NEXT: movaps 240(%rcx), %xmm7
3162 ; SSE-NEXT: movaps %xmm2, %xmm6
3163 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
3164 ; SSE-NEXT: movaps 240(%rdi), %xmm0
3165 ; SSE-NEXT: movaps 240(%rsi), %xmm4
3166 ; SSE-NEXT: movaps %xmm0, %xmm1
3167 ; SSE-NEXT: unpcklps {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
3168 ; SSE-NEXT: movaps %xmm1, %xmm3
3169 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm6[0]
3170 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
3171 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm7[2],xmm2[3],xmm7[3]
3172 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3173 ; SSE-NEXT: movaps %xmm0, %xmm4
3174 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
3175 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
3176 ; SSE-NEXT: movaps %xmm0, 1008(%r8)
3177 ; SSE-NEXT: movaps %xmm4, 992(%r8)
3178 ; SSE-NEXT: movaps %xmm1, 976(%r8)
3179 ; SSE-NEXT: movaps %xmm3, 960(%r8)
3180 ; SSE-NEXT: movaps %xmm5, 944(%r8)
3181 ; SSE-NEXT: movaps %xmm10, 928(%r8)
3182 ; SSE-NEXT: movaps %xmm9, 912(%r8)
3183 ; SSE-NEXT: movaps %xmm11, 896(%r8)
3184 ; SSE-NEXT: movaps %xmm13, 880(%r8)
3185 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3186 ; SSE-NEXT: movaps %xmm0, 864(%r8)
3187 ; SSE-NEXT: movaps %xmm8, 848(%r8)
3188 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3189 ; SSE-NEXT: movaps %xmm0, 832(%r8)
3190 ; SSE-NEXT: movaps %xmm12, 816(%r8)
3191 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3192 ; SSE-NEXT: movaps %xmm0, 800(%r8)
3193 ; SSE-NEXT: movaps %xmm14, 784(%r8)
3194 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3195 ; SSE-NEXT: movaps %xmm0, 768(%r8)
3196 ; SSE-NEXT: movaps %xmm15, 752(%r8)
3197 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3198 ; SSE-NEXT: movaps %xmm0, 736(%r8)
3199 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3200 ; SSE-NEXT: movaps %xmm0, 720(%r8)
3201 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3202 ; SSE-NEXT: movaps %xmm0, 704(%r8)
3203 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3204 ; SSE-NEXT: movaps %xmm0, 688(%r8)
3205 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
3206 ; SSE-NEXT: movaps %xmm0, 672(%r8)
3207 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3208 ; SSE-NEXT: movaps %xmm0, 656(%r8)
3209 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3210 ; SSE-NEXT: movaps %xmm0, 640(%r8)
3211 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3212 ; SSE-NEXT: movaps %xmm0, 624(%r8)
3213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3214 ; SSE-NEXT: movaps %xmm0, 608(%r8)
3215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3216 ; SSE-NEXT: movaps %xmm0, 592(%r8)
3217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3218 ; SSE-NEXT: movaps %xmm0, 576(%r8)
3219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3220 ; SSE-NEXT: movaps %xmm0, 560(%r8)
3221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3222 ; SSE-NEXT: movaps %xmm0, 544(%r8)
3223 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3224 ; SSE-NEXT: movaps %xmm0, 528(%r8)
3225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3226 ; SSE-NEXT: movaps %xmm0, 512(%r8)
3227 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3228 ; SSE-NEXT: movaps %xmm0, 496(%r8)
3229 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3230 ; SSE-NEXT: movaps %xmm0, 480(%r8)
3231 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3232 ; SSE-NEXT: movaps %xmm0, 464(%r8)
3233 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3234 ; SSE-NEXT: movaps %xmm0, 448(%r8)
3235 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3236 ; SSE-NEXT: movaps %xmm0, 432(%r8)
3237 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3238 ; SSE-NEXT: movaps %xmm0, 416(%r8)
3239 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3240 ; SSE-NEXT: movaps %xmm0, 400(%r8)
3241 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3242 ; SSE-NEXT: movaps %xmm0, 384(%r8)
3243 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3244 ; SSE-NEXT: movaps %xmm0, 368(%r8)
3245 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3246 ; SSE-NEXT: movaps %xmm0, 352(%r8)
3247 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3248 ; SSE-NEXT: movaps %xmm0, 336(%r8)
3249 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3250 ; SSE-NEXT: movaps %xmm0, 320(%r8)
3251 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3252 ; SSE-NEXT: movaps %xmm0, 304(%r8)
3253 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3254 ; SSE-NEXT: movaps %xmm0, 288(%r8)
3255 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3256 ; SSE-NEXT: movaps %xmm0, 272(%r8)
3257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3258 ; SSE-NEXT: movaps %xmm0, 256(%r8)
3259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3260 ; SSE-NEXT: movaps %xmm0, 240(%r8)
3261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3262 ; SSE-NEXT: movaps %xmm0, 224(%r8)
3263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3264 ; SSE-NEXT: movaps %xmm0, 208(%r8)
3265 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3266 ; SSE-NEXT: movaps %xmm0, 192(%r8)
3267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3268 ; SSE-NEXT: movaps %xmm0, 176(%r8)
3269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3270 ; SSE-NEXT: movaps %xmm0, 160(%r8)
3271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3272 ; SSE-NEXT: movaps %xmm0, 144(%r8)
3273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3274 ; SSE-NEXT: movaps %xmm0, 128(%r8)
3275 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3276 ; SSE-NEXT: movaps %xmm0, 112(%r8)
3277 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3278 ; SSE-NEXT: movaps %xmm0, 96(%r8)
3279 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3280 ; SSE-NEXT: movaps %xmm0, 80(%r8)
3281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3282 ; SSE-NEXT: movaps %xmm0, 64(%r8)
3283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3284 ; SSE-NEXT: movaps %xmm0, 48(%r8)
3285 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3286 ; SSE-NEXT: movaps %xmm0, 32(%r8)
3287 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3288 ; SSE-NEXT: movaps %xmm0, 16(%r8)
3289 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3290 ; SSE-NEXT: movaps %xmm0, (%r8)
3291 ; SSE-NEXT: addq $696, %rsp # imm = 0x2B8
3294 ; AVX-LABEL: store_i32_stride4_vf64:
3296 ; AVX-NEXT: subq $1384, %rsp # imm = 0x568
3297 ; AVX-NEXT: vmovaps (%rdi), %xmm2
3298 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3299 ; AVX-NEXT: vmovaps 16(%rdi), %xmm5
3300 ; AVX-NEXT: vmovaps 32(%rdi), %xmm3
3301 ; AVX-NEXT: vmovaps (%rsi), %xmm1
3302 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3303 ; AVX-NEXT: vmovaps 16(%rsi), %xmm6
3304 ; AVX-NEXT: vmovaps 32(%rsi), %xmm4
3305 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3306 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3307 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3308 ; AVX-NEXT: vmovaps (%rcx), %xmm11
3309 ; AVX-NEXT: vmovaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3310 ; AVX-NEXT: vmovaps 16(%rcx), %xmm9
3311 ; AVX-NEXT: vmovaps 32(%rcx), %xmm7
3312 ; AVX-NEXT: vmovaps (%rdx), %xmm2
3313 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3314 ; AVX-NEXT: vmovaps 16(%rdx), %xmm10
3315 ; AVX-NEXT: vmovaps 32(%rdx), %xmm8
3316 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm11[0],xmm2[1],xmm11[1]
3317 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm11[0],xmm2[0]
3318 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm2[0,1,2,0]
3319 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
3320 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3321 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3322 ; AVX-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3323 ; AVX-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3324 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm5[1],xmm6[1],zero,zero
3325 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
3326 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3327 ; AVX-NEXT: vmovaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3328 ; AVX-NEXT: vmovaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3329 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
3330 ; AVX-NEXT: vmovlhps {{.*#+}} xmm2 = xmm9[0],xmm10[0]
3331 ; AVX-NEXT: vshufps {{.*#+}} xmm2 = xmm2[0,1,2,0]
3332 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
3333 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3334 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3335 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3336 ; AVX-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3337 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm3[1],xmm4[1],zero,zero
3338 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
3339 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3340 ; AVX-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3341 ; AVX-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3342 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm7[0],xmm8[0]
3343 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3344 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
3345 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3346 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3347 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3348 ; AVX-NEXT: vmovaps 48(%rdi), %xmm2
3349 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3350 ; AVX-NEXT: vmovaps 48(%rsi), %xmm1
3351 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3352 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3353 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3354 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3355 ; AVX-NEXT: vmovaps 48(%rcx), %xmm3
3356 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3357 ; AVX-NEXT: vmovaps 48(%rdx), %xmm2
3358 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3359 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3360 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3361 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3362 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3363 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3364 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3365 ; AVX-NEXT: vmovaps 64(%rdi), %xmm2
3366 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3367 ; AVX-NEXT: vmovaps 64(%rsi), %xmm1
3368 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3369 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3370 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3371 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3372 ; AVX-NEXT: vmovaps 64(%rcx), %xmm3
3373 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3374 ; AVX-NEXT: vmovaps 64(%rdx), %xmm2
3375 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3376 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3377 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3378 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3379 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3380 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3381 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3382 ; AVX-NEXT: vmovaps 80(%rdi), %xmm2
3383 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3384 ; AVX-NEXT: vmovaps 80(%rsi), %xmm1
3385 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3386 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3387 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3388 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3389 ; AVX-NEXT: vmovaps 80(%rcx), %xmm3
3390 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3391 ; AVX-NEXT: vmovaps 80(%rdx), %xmm2
3392 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3393 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3394 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3395 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3396 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3397 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3398 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3399 ; AVX-NEXT: vmovaps 96(%rdi), %xmm2
3400 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3401 ; AVX-NEXT: vmovaps 96(%rsi), %xmm1
3402 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3403 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3404 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3405 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3406 ; AVX-NEXT: vmovaps 96(%rcx), %xmm3
3407 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3408 ; AVX-NEXT: vmovaps 96(%rdx), %xmm2
3409 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3410 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3411 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3412 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3413 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3414 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3415 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3416 ; AVX-NEXT: vmovaps 112(%rdi), %xmm2
3417 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3418 ; AVX-NEXT: vmovaps 112(%rsi), %xmm1
3419 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3420 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3421 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3422 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3423 ; AVX-NEXT: vmovaps 112(%rcx), %xmm3
3424 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3425 ; AVX-NEXT: vmovaps 112(%rdx), %xmm2
3426 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3427 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3428 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3429 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3430 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3431 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3432 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3433 ; AVX-NEXT: vmovaps 128(%rdi), %xmm2
3434 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3435 ; AVX-NEXT: vmovaps 128(%rsi), %xmm1
3436 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3437 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3438 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3439 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3440 ; AVX-NEXT: vmovaps 128(%rcx), %xmm3
3441 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3442 ; AVX-NEXT: vmovaps 128(%rdx), %xmm2
3443 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3444 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3445 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3446 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3447 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3448 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3449 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3450 ; AVX-NEXT: vmovaps 144(%rdi), %xmm2
3451 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3452 ; AVX-NEXT: vmovaps 144(%rsi), %xmm1
3453 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3454 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3455 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3456 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3457 ; AVX-NEXT: vmovaps 144(%rcx), %xmm3
3458 ; AVX-NEXT: vmovaps %xmm3, (%rsp) # 16-byte Spill
3459 ; AVX-NEXT: vmovaps 144(%rdx), %xmm2
3460 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3461 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3462 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3463 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3464 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3465 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3466 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3467 ; AVX-NEXT: vmovaps 160(%rdi), %xmm2
3468 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3469 ; AVX-NEXT: vmovaps 160(%rsi), %xmm1
3470 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3471 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3472 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3473 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3474 ; AVX-NEXT: vmovaps 160(%rcx), %xmm3
3475 ; AVX-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3476 ; AVX-NEXT: vmovaps 160(%rdx), %xmm2
3477 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3478 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3479 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3480 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3481 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3482 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3483 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3484 ; AVX-NEXT: vmovaps 176(%rdi), %xmm2
3485 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3486 ; AVX-NEXT: vmovaps 176(%rsi), %xmm1
3487 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3488 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3489 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3490 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3491 ; AVX-NEXT: vmovaps 176(%rcx), %xmm2
3492 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3493 ; AVX-NEXT: vmovaps 176(%rdx), %xmm13
3494 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm13[0]
3495 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3496 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm13[0],xmm2[0],xmm13[1],xmm2[1]
3497 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3498 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3499 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3500 ; AVX-NEXT: vmovaps 192(%rdi), %xmm2
3501 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3502 ; AVX-NEXT: vmovaps 192(%rsi), %xmm1
3503 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3504 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3505 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3506 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3507 ; AVX-NEXT: vmovaps 192(%rcx), %xmm10
3508 ; AVX-NEXT: vmovaps 192(%rdx), %xmm8
3509 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm10[0],xmm8[0]
3510 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3511 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm8[0],xmm10[0],xmm8[1],xmm10[1]
3512 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3513 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3514 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3515 ; AVX-NEXT: vmovaps 208(%rdi), %xmm2
3516 ; AVX-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3517 ; AVX-NEXT: vmovaps 208(%rsi), %xmm1
3518 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3519 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm2[1],xmm1[1],zero,zero
3520 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
3521 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3522 ; AVX-NEXT: vmovaps 208(%rcx), %xmm6
3523 ; AVX-NEXT: vmovaps 208(%rdx), %xmm4
3524 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm6[0],xmm4[0]
3525 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3526 ; AVX-NEXT: vunpcklps {{.*#+}} xmm2 = xmm4[0],xmm6[0],xmm4[1],xmm6[1]
3527 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3528 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3529 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3530 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
3531 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3532 ; AVX-NEXT: vmovaps 224(%rsi), %xmm11
3533 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm1[1],xmm11[1],zero,zero
3534 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm1[0],xmm11[0],xmm1[1],xmm11[1]
3535 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3536 ; AVX-NEXT: vmovaps 224(%rcx), %xmm3
3537 ; AVX-NEXT: vmovaps 224(%rdx), %xmm2
3538 ; AVX-NEXT: vmovlhps {{.*#+}} xmm1 = xmm3[0],xmm2[0]
3539 ; AVX-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,1,2,0]
3540 ; AVX-NEXT: vunpcklps {{.*#+}} xmm5 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3541 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
3542 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3543 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3544 ; AVX-NEXT: vmovaps 240(%rdi), %xmm9
3545 ; AVX-NEXT: vmovaps 240(%rsi), %xmm7
3546 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm9[1],xmm7[1],zero,zero
3547 ; AVX-NEXT: vunpcklps {{.*#+}} xmm1 = xmm9[0],xmm7[0],xmm9[1],xmm7[1]
3548 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm12
3549 ; AVX-NEXT: vmovaps 240(%rcx), %xmm5
3550 ; AVX-NEXT: vmovaps 240(%rdx), %xmm1
3551 ; AVX-NEXT: vmovlhps {{.*#+}} xmm14 = xmm5[0],xmm1[0]
3552 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm14[0,1,2,0]
3553 ; AVX-NEXT: vunpcklps {{.*#+}} xmm15 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
3554 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3555 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1],ymm14[2,3],ymm12[4,5],ymm14[6,7]
3556 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3557 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3558 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3559 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3560 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3561 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3562 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3563 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3564 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
3565 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[3,0],xmm0[3,0]
3566 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3567 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3568 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3569 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3570 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3571 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3572 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3573 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3574 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3575 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3576 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3577 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3578 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3579 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3580 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3581 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3582 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3583 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3584 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3585 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3586 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3587 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3588 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3589 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3590 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3591 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3592 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3593 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3594 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3595 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3596 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3597 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3598 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3599 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3600 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3601 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3602 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3603 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3604 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3605 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3606 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3607 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3608 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3609 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3610 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3611 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3612 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3613 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3614 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3615 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3616 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3617 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3618 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3619 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3620 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3621 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3622 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3623 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3624 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3625 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3626 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3627 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3628 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3629 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3630 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3631 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3632 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3633 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3634 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3635 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3636 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3637 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3638 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3639 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3640 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3641 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3642 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3643 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3644 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3645 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3646 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3647 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3648 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3649 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3650 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3651 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3652 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3653 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3654 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3655 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3656 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3657 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3658 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3659 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3660 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3661 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3662 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3663 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3664 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3665 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3666 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3667 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3668 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3669 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3670 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3671 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3672 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3673 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3674 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3675 ; AVX-NEXT: vmovaps (%rsp), %xmm14 # 16-byte Reload
3676 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3677 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3678 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3679 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3680 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3681 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3682 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3683 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3684 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3685 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3686 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3687 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3688 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3689 ; AVX-NEXT: vunpckhps {{.*#+}} xmm12 = xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3690 ; AVX-NEXT: vinsertps {{.*#+}} xmm14 = zero,zero,xmm0[2],xmm14[2]
3691 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
3692 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3693 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3694 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3695 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3696 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3697 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3698 ; AVX-NEXT: vblendps {{.*#+}} ymm12 = ymm14[0,1],ymm12[2,3],ymm14[4,5],ymm12[6,7]
3699 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3700 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm13[2],xmm0[2],xmm13[3],xmm0[3]
3701 ; AVX-NEXT: vinsertps {{.*#+}} xmm13 = zero,zero,xmm13[2],xmm0[2]
3702 ; AVX-NEXT: vinsertf128 $1, %xmm14, %ymm13, %ymm13
3703 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3704 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3705 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm15[2],xmm0[2],xmm15[3],xmm0[3]
3706 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm0[3,0],xmm15[3,0]
3707 ; AVX-NEXT: vshufps {{.*#+}} xmm15 = xmm15[2,0,2,3]
3708 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3709 ; AVX-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3],ymm14[4,5],ymm13[6,7]
3710 ; AVX-NEXT: vunpckhps {{.*#+}} xmm14 = xmm8[2],xmm10[2],xmm8[3],xmm10[3]
3711 ; AVX-NEXT: vinsertps {{.*#+}} xmm8 = zero,zero,xmm8[2],xmm10[2]
3712 ; AVX-NEXT: vinsertf128 $1, %xmm14, %ymm8, %ymm8
3713 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3714 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3715 ; AVX-NEXT: vunpckhps {{.*#+}} xmm10 = xmm14[2],xmm0[2],xmm14[3],xmm0[3]
3716 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm0[3,0],xmm14[3,0]
3717 ; AVX-NEXT: vshufps {{.*#+}} xmm14 = xmm14[2,0,2,3]
3718 ; AVX-NEXT: vinsertf128 $1, %xmm14, %ymm10, %ymm10
3719 ; AVX-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1],ymm8[2,3],ymm10[4,5],ymm8[6,7]
3720 ; AVX-NEXT: vunpckhps {{.*#+}} xmm10 = xmm4[2],xmm6[2],xmm4[3],xmm6[3]
3721 ; AVX-NEXT: vinsertps {{.*#+}} xmm4 = zero,zero,xmm4[2],xmm6[2]
3722 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm4, %ymm4
3723 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3724 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3725 ; AVX-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm0[2],xmm10[3],xmm0[3]
3726 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm0[3,0],xmm10[3,0]
3727 ; AVX-NEXT: vshufps {{.*#+}} xmm10 = xmm10[2,0,2,3]
3728 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm6, %ymm6
3729 ; AVX-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
3730 ; AVX-NEXT: vunpckhps {{.*#+}} xmm6 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3731 ; AVX-NEXT: vinsertps {{.*#+}} xmm2 = zero,zero,xmm2[2],xmm3[2]
3732 ; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm2, %ymm2
3733 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3734 ; AVX-NEXT: vunpckhps {{.*#+}} xmm3 = xmm0[2],xmm11[2],xmm0[3],xmm11[3]
3735 ; AVX-NEXT: vshufps {{.*#+}} xmm6 = xmm11[3,0],xmm0[3,0]
3736 ; AVX-NEXT: vshufps {{.*#+}} xmm6 = xmm6[2,0,2,3]
3737 ; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm3, %ymm3
3738 ; AVX-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
3739 ; AVX-NEXT: vunpckhps {{.*#+}} xmm3 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
3740 ; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,zero,xmm1[2],xmm5[2]
3741 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
3742 ; AVX-NEXT: vunpckhps {{.*#+}} xmm1 = xmm9[2],xmm7[2],xmm9[3],xmm7[3]
3743 ; AVX-NEXT: vshufps {{.*#+}} xmm3 = xmm7[3,0],xmm9[3,0]
3744 ; AVX-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,0,2,3]
3745 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
3746 ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3747 ; AVX-NEXT: vmovaps %ymm0, 992(%r8)
3748 ; AVX-NEXT: vmovaps %ymm2, 928(%r8)
3749 ; AVX-NEXT: vmovaps %ymm4, 864(%r8)
3750 ; AVX-NEXT: vmovaps %ymm8, 800(%r8)
3751 ; AVX-NEXT: vmovaps %ymm13, 736(%r8)
3752 ; AVX-NEXT: vmovaps %ymm12, 672(%r8)
3753 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3754 ; AVX-NEXT: vmovaps %ymm0, 608(%r8)
3755 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3756 ; AVX-NEXT: vmovaps %ymm0, 544(%r8)
3757 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3758 ; AVX-NEXT: vmovaps %ymm0, 480(%r8)
3759 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3760 ; AVX-NEXT: vmovaps %ymm0, 416(%r8)
3761 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3762 ; AVX-NEXT: vmovaps %ymm0, 352(%r8)
3763 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3764 ; AVX-NEXT: vmovaps %ymm0, 288(%r8)
3765 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3766 ; AVX-NEXT: vmovaps %ymm0, 224(%r8)
3767 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3768 ; AVX-NEXT: vmovaps %ymm0, 160(%r8)
3769 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3770 ; AVX-NEXT: vmovaps %ymm0, 96(%r8)
3771 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3772 ; AVX-NEXT: vmovaps %ymm0, 32(%r8)
3773 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3774 ; AVX-NEXT: vmovaps %ymm0, 960(%r8)
3775 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3776 ; AVX-NEXT: vmovaps %ymm0, 896(%r8)
3777 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3778 ; AVX-NEXT: vmovaps %ymm0, 832(%r8)
3779 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3780 ; AVX-NEXT: vmovaps %ymm0, 768(%r8)
3781 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3782 ; AVX-NEXT: vmovaps %ymm0, 704(%r8)
3783 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3784 ; AVX-NEXT: vmovaps %ymm0, 640(%r8)
3785 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3786 ; AVX-NEXT: vmovaps %ymm0, 576(%r8)
3787 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3788 ; AVX-NEXT: vmovaps %ymm0, 512(%r8)
3789 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3790 ; AVX-NEXT: vmovaps %ymm0, 448(%r8)
3791 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3792 ; AVX-NEXT: vmovaps %ymm0, 384(%r8)
3793 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3794 ; AVX-NEXT: vmovaps %ymm0, 320(%r8)
3795 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3796 ; AVX-NEXT: vmovaps %ymm0, 256(%r8)
3797 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3798 ; AVX-NEXT: vmovaps %ymm0, 192(%r8)
3799 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3800 ; AVX-NEXT: vmovaps %ymm0, 128(%r8)
3801 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3802 ; AVX-NEXT: vmovaps %ymm0, 64(%r8)
3803 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3804 ; AVX-NEXT: vmovaps %ymm0, (%r8)
3805 ; AVX-NEXT: addq $1384, %rsp # imm = 0x568
3806 ; AVX-NEXT: vzeroupper
3809 ; AVX2-LABEL: store_i32_stride4_vf64:
3811 ; AVX2-NEXT: subq $520, %rsp # imm = 0x208
3812 ; AVX2-NEXT: vmovaps (%rcx), %xmm4
3813 ; AVX2-NEXT: vmovaps 32(%rcx), %xmm5
3814 ; AVX2-NEXT: vmovaps 64(%rcx), %xmm0
3815 ; AVX2-NEXT: vmovaps (%rdx), %xmm6
3816 ; AVX2-NEXT: vmovaps 32(%rdx), %xmm7
3817 ; AVX2-NEXT: vmovaps 64(%rdx), %xmm1
3818 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
3819 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[0,0,2,1]
3820 ; AVX2-NEXT: vmovaps (%rsi), %xmm9
3821 ; AVX2-NEXT: vmovaps 32(%rsi), %xmm10
3822 ; AVX2-NEXT: vmovaps 64(%rsi), %xmm2
3823 ; AVX2-NEXT: vmovaps (%rdi), %xmm11
3824 ; AVX2-NEXT: vmovaps 32(%rdi), %xmm12
3825 ; AVX2-NEXT: vmovaps 64(%rdi), %xmm3
3826 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm13 = xmm11[0],xmm9[0],xmm11[1],xmm9[1]
3827 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
3828 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm13[0,1],ymm8[2,3],ymm13[4,5],ymm8[6,7]
3829 ; AVX2-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3830 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm4 = xmm6[2],xmm4[2],xmm6[3],xmm4[3]
3831 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
3832 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm6 = xmm11[2],xmm9[2],xmm11[3],xmm9[3]
3833 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
3834 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
3835 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3836 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
3837 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
3838 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm6 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
3839 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
3840 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
3841 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3842 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm4 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
3843 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
3844 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm5 = xmm12[2],xmm10[2],xmm12[3],xmm10[3]
3845 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3846 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
3847 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3848 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3849 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
3850 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
3851 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3852 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
3853 ; AVX2-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3854 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3855 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3856 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3857 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3858 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3859 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3860 ; AVX2-NEXT: vmovaps 96(%rcx), %xmm0
3861 ; AVX2-NEXT: vmovaps 96(%rdx), %xmm1
3862 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3863 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
3864 ; AVX2-NEXT: vmovaps 96(%rsi), %xmm3
3865 ; AVX2-NEXT: vmovaps 96(%rdi), %xmm4
3866 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3867 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3868 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3869 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3870 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3871 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3872 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3873 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3874 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3875 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3876 ; AVX2-NEXT: vmovaps 128(%rcx), %xmm0
3877 ; AVX2-NEXT: vmovaps 128(%rdx), %xmm1
3878 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3879 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
3880 ; AVX2-NEXT: vmovaps 128(%rsi), %xmm3
3881 ; AVX2-NEXT: vmovaps 128(%rdi), %xmm4
3882 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3883 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3884 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3885 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3886 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3887 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3888 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3889 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3890 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3891 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3892 ; AVX2-NEXT: vmovaps 160(%rcx), %xmm0
3893 ; AVX2-NEXT: vmovaps 160(%rdx), %xmm1
3894 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3895 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
3896 ; AVX2-NEXT: vmovaps 160(%rsi), %xmm3
3897 ; AVX2-NEXT: vmovaps 160(%rdi), %xmm4
3898 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3899 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3900 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3901 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3902 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3903 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3904 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3905 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3906 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3907 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3908 ; AVX2-NEXT: vmovaps 192(%rcx), %xmm0
3909 ; AVX2-NEXT: vmovaps 192(%rdx), %xmm1
3910 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3911 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
3912 ; AVX2-NEXT: vmovaps 192(%rsi), %xmm3
3913 ; AVX2-NEXT: vmovaps 192(%rdi), %xmm4
3914 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3915 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3916 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3917 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3918 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3919 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3920 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3921 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3922 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3923 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3924 ; AVX2-NEXT: vmovaps 224(%rcx), %xmm0
3925 ; AVX2-NEXT: vmovaps 224(%rdx), %xmm1
3926 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3927 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
3928 ; AVX2-NEXT: vmovaps 224(%rsi), %xmm3
3929 ; AVX2-NEXT: vmovaps 224(%rdi), %xmm4
3930 ; AVX2-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
3931 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
3932 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3933 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3934 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3935 ; AVX2-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3936 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
3937 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
3938 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3939 ; AVX2-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
3940 ; AVX2-NEXT: vmovaps (%rdx), %ymm0
3941 ; AVX2-NEXT: vmovaps (%rcx), %ymm1
3942 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
3943 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
3944 ; AVX2-NEXT: vmovaps (%rdi), %ymm3
3945 ; AVX2-NEXT: vmovaps (%rsi), %ymm4
3946 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
3947 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
3948 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3949 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3950 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
3951 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
3952 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
3953 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
3954 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3955 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3956 ; AVX2-NEXT: vmovaps 32(%rdx), %ymm0
3957 ; AVX2-NEXT: vmovaps 32(%rcx), %ymm1
3958 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
3959 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
3960 ; AVX2-NEXT: vmovaps 32(%rdi), %ymm3
3961 ; AVX2-NEXT: vmovaps 32(%rsi), %ymm4
3962 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
3963 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
3964 ; AVX2-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3965 ; AVX2-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3966 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
3967 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
3968 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
3969 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
3970 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3971 ; AVX2-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3972 ; AVX2-NEXT: vmovaps 64(%rdx), %ymm0
3973 ; AVX2-NEXT: vmovaps 64(%rcx), %ymm1
3974 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
3975 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
3976 ; AVX2-NEXT: vmovaps 64(%rdi), %ymm3
3977 ; AVX2-NEXT: vmovaps 64(%rsi), %ymm4
3978 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
3979 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
3980 ; AVX2-NEXT: vblendps {{.*#+}} ymm11 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3981 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
3982 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
3983 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
3984 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
3985 ; AVX2-NEXT: vblendps {{.*#+}} ymm10 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3986 ; AVX2-NEXT: vmovaps 96(%rdx), %ymm0
3987 ; AVX2-NEXT: vmovaps 96(%rcx), %ymm1
3988 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
3989 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
3990 ; AVX2-NEXT: vmovaps 96(%rdi), %ymm3
3991 ; AVX2-NEXT: vmovaps 96(%rsi), %ymm4
3992 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
3993 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
3994 ; AVX2-NEXT: vblendps {{.*#+}} ymm9 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
3995 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
3996 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
3997 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
3998 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
3999 ; AVX2-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4000 ; AVX2-NEXT: vmovaps 128(%rdx), %ymm3
4001 ; AVX2-NEXT: vmovaps 128(%rcx), %ymm1
4002 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm2 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[4],ymm1[4],ymm3[5],ymm1[5]
4003 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm2[0,2,2,3]
4004 ; AVX2-NEXT: vmovaps 128(%rdi), %ymm2
4005 ; AVX2-NEXT: vmovaps 128(%rsi), %ymm0
4006 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[4],ymm0[4],ymm2[5],ymm0[5]
4007 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
4008 ; AVX2-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm4[2,3],ymm13[4,5],ymm4[6,7]
4009 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[6],ymm1[6],ymm3[7],ymm1[7]
4010 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[6],ymm0[6],ymm2[7],ymm0[7]
4011 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4012 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4013 ; AVX2-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
4014 ; AVX2-NEXT: vmovaps 160(%rdx), %ymm2
4015 ; AVX2-NEXT: vmovaps 160(%rcx), %ymm3
4016 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm1 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4017 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4018 ; AVX2-NEXT: vmovaps 160(%rdi), %ymm13
4019 ; AVX2-NEXT: vmovaps 160(%rsi), %ymm0
4020 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm15 = ymm13[0],ymm0[0],ymm13[1],ymm0[1],ymm13[4],ymm0[4],ymm13[5],ymm0[5]
4021 ; AVX2-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,1,3,3]
4022 ; AVX2-NEXT: vblendps {{.*#+}} ymm5 = ymm15[0,1],ymm1[2,3],ymm15[4,5],ymm1[6,7]
4023 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4024 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm0[2],ymm13[3],ymm0[3],ymm13[6],ymm0[6],ymm13[7],ymm0[7]
4025 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4026 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4027 ; AVX2-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4028 ; AVX2-NEXT: vmovaps 192(%rdx), %ymm2
4029 ; AVX2-NEXT: vmovaps 192(%rcx), %ymm3
4030 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4031 ; AVX2-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
4032 ; AVX2-NEXT: vmovaps 192(%rdi), %ymm15
4033 ; AVX2-NEXT: vmovaps 192(%rsi), %ymm0
4034 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm14 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4035 ; AVX2-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,1,3,3]
4036 ; AVX2-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3],ymm14[4,5],ymm13[6,7]
4037 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4038 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4039 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4040 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4041 ; AVX2-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4042 ; AVX2-NEXT: vmovaps 224(%rdx), %ymm2
4043 ; AVX2-NEXT: vmovaps 224(%rcx), %ymm3
4044 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm14 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4045 ; AVX2-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
4046 ; AVX2-NEXT: vmovaps 224(%rdi), %ymm15
4047 ; AVX2-NEXT: vmovaps 224(%rsi), %ymm0
4048 ; AVX2-NEXT: vunpcklps {{.*#+}} ymm12 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4049 ; AVX2-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
4050 ; AVX2-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm14[2,3],ymm12[4,5],ymm14[6,7]
4051 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4052 ; AVX2-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4053 ; AVX2-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4054 ; AVX2-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4055 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4056 ; AVX2-NEXT: vmovaps %ymm0, 992(%r8)
4057 ; AVX2-NEXT: vmovaps %ymm12, 960(%r8)
4058 ; AVX2-NEXT: vmovaps %ymm1, 864(%r8)
4059 ; AVX2-NEXT: vmovaps %ymm13, 832(%r8)
4060 ; AVX2-NEXT: vmovaps %ymm4, 736(%r8)
4061 ; AVX2-NEXT: vmovaps %ymm5, 704(%r8)
4062 ; AVX2-NEXT: vmovaps %ymm6, 608(%r8)
4063 ; AVX2-NEXT: vmovaps %ymm7, 576(%r8)
4064 ; AVX2-NEXT: vmovaps %ymm8, 480(%r8)
4065 ; AVX2-NEXT: vmovaps %ymm9, 448(%r8)
4066 ; AVX2-NEXT: vmovaps %ymm10, 352(%r8)
4067 ; AVX2-NEXT: vmovaps %ymm11, 320(%r8)
4068 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4069 ; AVX2-NEXT: vmovaps %ymm0, 224(%r8)
4070 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4071 ; AVX2-NEXT: vmovaps %ymm0, 192(%r8)
4072 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4073 ; AVX2-NEXT: vmovaps %ymm0, 96(%r8)
4074 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4075 ; AVX2-NEXT: vmovaps %ymm0, 64(%r8)
4076 ; AVX2-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4077 ; AVX2-NEXT: vmovaps %ymm0, 928(%r8)
4078 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4079 ; AVX2-NEXT: vmovaps %ymm0, 896(%r8)
4080 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4081 ; AVX2-NEXT: vmovaps %ymm0, 800(%r8)
4082 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4083 ; AVX2-NEXT: vmovaps %ymm0, 768(%r8)
4084 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4085 ; AVX2-NEXT: vmovaps %ymm0, 672(%r8)
4086 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4087 ; AVX2-NEXT: vmovaps %ymm0, 640(%r8)
4088 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4089 ; AVX2-NEXT: vmovaps %ymm0, 544(%r8)
4090 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4091 ; AVX2-NEXT: vmovaps %ymm0, 512(%r8)
4092 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4093 ; AVX2-NEXT: vmovaps %ymm0, 416(%r8)
4094 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4095 ; AVX2-NEXT: vmovaps %ymm0, 384(%r8)
4096 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4097 ; AVX2-NEXT: vmovaps %ymm0, 288(%r8)
4098 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4099 ; AVX2-NEXT: vmovaps %ymm0, 256(%r8)
4100 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4101 ; AVX2-NEXT: vmovaps %ymm0, 160(%r8)
4102 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4103 ; AVX2-NEXT: vmovaps %ymm0, 128(%r8)
4104 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4105 ; AVX2-NEXT: vmovaps %ymm0, 32(%r8)
4106 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4107 ; AVX2-NEXT: vmovaps %ymm0, (%r8)
4108 ; AVX2-NEXT: addq $520, %rsp # imm = 0x208
4109 ; AVX2-NEXT: vzeroupper
4112 ; AVX2-FP-LABEL: store_i32_stride4_vf64:
4114 ; AVX2-FP-NEXT: subq $520, %rsp # imm = 0x208
4115 ; AVX2-FP-NEXT: vmovaps (%rcx), %xmm4
4116 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %xmm5
4117 ; AVX2-FP-NEXT: vmovaps 64(%rcx), %xmm0
4118 ; AVX2-FP-NEXT: vmovaps (%rdx), %xmm6
4119 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %xmm7
4120 ; AVX2-FP-NEXT: vmovaps 64(%rdx), %xmm1
4121 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
4122 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[0,0,2,1]
4123 ; AVX2-FP-NEXT: vmovaps (%rsi), %xmm9
4124 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %xmm10
4125 ; AVX2-FP-NEXT: vmovaps 64(%rsi), %xmm2
4126 ; AVX2-FP-NEXT: vmovaps (%rdi), %xmm11
4127 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %xmm12
4128 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %xmm3
4129 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm13 = xmm11[0],xmm9[0],xmm11[1],xmm9[1]
4130 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
4131 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm13[0,1],ymm8[2,3],ymm13[4,5],ymm8[6,7]
4132 ; AVX2-FP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4133 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4134 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4135 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm6 = xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4136 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
4137 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
4138 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4139 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
4140 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4141 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4142 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
4143 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
4144 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4145 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4146 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4147 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm12[2],xmm10[2],xmm12[3],xmm10[3]
4148 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4149 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
4150 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4151 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4152 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4153 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
4154 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4155 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
4156 ; AVX2-FP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4157 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4158 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4159 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4160 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4161 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4162 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4163 ; AVX2-FP-NEXT: vmovaps 96(%rcx), %xmm0
4164 ; AVX2-FP-NEXT: vmovaps 96(%rdx), %xmm1
4165 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4166 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4167 ; AVX2-FP-NEXT: vmovaps 96(%rsi), %xmm3
4168 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %xmm4
4169 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4170 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4171 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4172 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4173 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4174 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4175 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4176 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4177 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4178 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4179 ; AVX2-FP-NEXT: vmovaps 128(%rcx), %xmm0
4180 ; AVX2-FP-NEXT: vmovaps 128(%rdx), %xmm1
4181 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4182 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4183 ; AVX2-FP-NEXT: vmovaps 128(%rsi), %xmm3
4184 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %xmm4
4185 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4186 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4187 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4188 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4189 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4190 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4191 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4192 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4193 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4194 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4195 ; AVX2-FP-NEXT: vmovaps 160(%rcx), %xmm0
4196 ; AVX2-FP-NEXT: vmovaps 160(%rdx), %xmm1
4197 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4198 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4199 ; AVX2-FP-NEXT: vmovaps 160(%rsi), %xmm3
4200 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %xmm4
4201 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4202 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4203 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4204 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4205 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4206 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4207 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4208 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4209 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4210 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4211 ; AVX2-FP-NEXT: vmovaps 192(%rcx), %xmm0
4212 ; AVX2-FP-NEXT: vmovaps 192(%rdx), %xmm1
4213 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4214 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4215 ; AVX2-FP-NEXT: vmovaps 192(%rsi), %xmm3
4216 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %xmm4
4217 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4218 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4219 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4220 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4221 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4222 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4223 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4224 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4225 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4226 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4227 ; AVX2-FP-NEXT: vmovaps 224(%rcx), %xmm0
4228 ; AVX2-FP-NEXT: vmovaps 224(%rdx), %xmm1
4229 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4230 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4231 ; AVX2-FP-NEXT: vmovaps 224(%rsi), %xmm3
4232 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %xmm4
4233 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4234 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4235 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4236 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4237 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4238 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4239 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4240 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4241 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4242 ; AVX2-FP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
4243 ; AVX2-FP-NEXT: vmovaps (%rdx), %ymm0
4244 ; AVX2-FP-NEXT: vmovaps (%rcx), %ymm1
4245 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4246 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4247 ; AVX2-FP-NEXT: vmovaps (%rdi), %ymm3
4248 ; AVX2-FP-NEXT: vmovaps (%rsi), %ymm4
4249 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4250 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4251 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4252 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4253 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4254 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4255 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4256 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4257 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4258 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4259 ; AVX2-FP-NEXT: vmovaps 32(%rdx), %ymm0
4260 ; AVX2-FP-NEXT: vmovaps 32(%rcx), %ymm1
4261 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4262 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4263 ; AVX2-FP-NEXT: vmovaps 32(%rdi), %ymm3
4264 ; AVX2-FP-NEXT: vmovaps 32(%rsi), %ymm4
4265 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4266 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4267 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4268 ; AVX2-FP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4269 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4270 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4271 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4272 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4273 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4274 ; AVX2-FP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4275 ; AVX2-FP-NEXT: vmovaps 64(%rdx), %ymm0
4276 ; AVX2-FP-NEXT: vmovaps 64(%rcx), %ymm1
4277 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4278 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4279 ; AVX2-FP-NEXT: vmovaps 64(%rdi), %ymm3
4280 ; AVX2-FP-NEXT: vmovaps 64(%rsi), %ymm4
4281 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4282 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4283 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm11 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4284 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4285 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4286 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4287 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4288 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm10 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4289 ; AVX2-FP-NEXT: vmovaps 96(%rdx), %ymm0
4290 ; AVX2-FP-NEXT: vmovaps 96(%rcx), %ymm1
4291 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4292 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4293 ; AVX2-FP-NEXT: vmovaps 96(%rdi), %ymm3
4294 ; AVX2-FP-NEXT: vmovaps 96(%rsi), %ymm4
4295 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4296 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4297 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm9 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4298 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4299 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4300 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4301 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4302 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4303 ; AVX2-FP-NEXT: vmovaps 128(%rdx), %ymm3
4304 ; AVX2-FP-NEXT: vmovaps 128(%rcx), %ymm1
4305 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[4],ymm1[4],ymm3[5],ymm1[5]
4306 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm4 = ymm2[0,2,2,3]
4307 ; AVX2-FP-NEXT: vmovaps 128(%rdi), %ymm2
4308 ; AVX2-FP-NEXT: vmovaps 128(%rsi), %ymm0
4309 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[4],ymm0[4],ymm2[5],ymm0[5]
4310 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
4311 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm4[2,3],ymm13[4,5],ymm4[6,7]
4312 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[6],ymm1[6],ymm3[7],ymm1[7]
4313 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[6],ymm0[6],ymm2[7],ymm0[7]
4314 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4315 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4316 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
4317 ; AVX2-FP-NEXT: vmovaps 160(%rdx), %ymm2
4318 ; AVX2-FP-NEXT: vmovaps 160(%rcx), %ymm3
4319 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm1 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4320 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4321 ; AVX2-FP-NEXT: vmovaps 160(%rdi), %ymm13
4322 ; AVX2-FP-NEXT: vmovaps 160(%rsi), %ymm0
4323 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm15 = ymm13[0],ymm0[0],ymm13[1],ymm0[1],ymm13[4],ymm0[4],ymm13[5],ymm0[5]
4324 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,1,3,3]
4325 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm5 = ymm15[0,1],ymm1[2,3],ymm15[4,5],ymm1[6,7]
4326 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4327 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm0[2],ymm13[3],ymm0[3],ymm13[6],ymm0[6],ymm13[7],ymm0[7]
4328 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4329 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4330 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4331 ; AVX2-FP-NEXT: vmovaps 192(%rdx), %ymm2
4332 ; AVX2-FP-NEXT: vmovaps 192(%rcx), %ymm3
4333 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4334 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
4335 ; AVX2-FP-NEXT: vmovaps 192(%rdi), %ymm15
4336 ; AVX2-FP-NEXT: vmovaps 192(%rsi), %ymm0
4337 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4338 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,1,3,3]
4339 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3],ymm14[4,5],ymm13[6,7]
4340 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4341 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4342 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4343 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4344 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4345 ; AVX2-FP-NEXT: vmovaps 224(%rdx), %ymm2
4346 ; AVX2-FP-NEXT: vmovaps 224(%rcx), %ymm3
4347 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4348 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
4349 ; AVX2-FP-NEXT: vmovaps 224(%rdi), %ymm15
4350 ; AVX2-FP-NEXT: vmovaps 224(%rsi), %ymm0
4351 ; AVX2-FP-NEXT: vunpcklps {{.*#+}} ymm12 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4352 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
4353 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm14[2,3],ymm12[4,5],ymm14[6,7]
4354 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4355 ; AVX2-FP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4356 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4357 ; AVX2-FP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4358 ; AVX2-FP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4359 ; AVX2-FP-NEXT: vmovaps %ymm0, 992(%r8)
4360 ; AVX2-FP-NEXT: vmovaps %ymm12, 960(%r8)
4361 ; AVX2-FP-NEXT: vmovaps %ymm1, 864(%r8)
4362 ; AVX2-FP-NEXT: vmovaps %ymm13, 832(%r8)
4363 ; AVX2-FP-NEXT: vmovaps %ymm4, 736(%r8)
4364 ; AVX2-FP-NEXT: vmovaps %ymm5, 704(%r8)
4365 ; AVX2-FP-NEXT: vmovaps %ymm6, 608(%r8)
4366 ; AVX2-FP-NEXT: vmovaps %ymm7, 576(%r8)
4367 ; AVX2-FP-NEXT: vmovaps %ymm8, 480(%r8)
4368 ; AVX2-FP-NEXT: vmovaps %ymm9, 448(%r8)
4369 ; AVX2-FP-NEXT: vmovaps %ymm10, 352(%r8)
4370 ; AVX2-FP-NEXT: vmovaps %ymm11, 320(%r8)
4371 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4372 ; AVX2-FP-NEXT: vmovaps %ymm0, 224(%r8)
4373 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4374 ; AVX2-FP-NEXT: vmovaps %ymm0, 192(%r8)
4375 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4376 ; AVX2-FP-NEXT: vmovaps %ymm0, 96(%r8)
4377 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4378 ; AVX2-FP-NEXT: vmovaps %ymm0, 64(%r8)
4379 ; AVX2-FP-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4380 ; AVX2-FP-NEXT: vmovaps %ymm0, 928(%r8)
4381 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4382 ; AVX2-FP-NEXT: vmovaps %ymm0, 896(%r8)
4383 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4384 ; AVX2-FP-NEXT: vmovaps %ymm0, 800(%r8)
4385 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4386 ; AVX2-FP-NEXT: vmovaps %ymm0, 768(%r8)
4387 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4388 ; AVX2-FP-NEXT: vmovaps %ymm0, 672(%r8)
4389 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4390 ; AVX2-FP-NEXT: vmovaps %ymm0, 640(%r8)
4391 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4392 ; AVX2-FP-NEXT: vmovaps %ymm0, 544(%r8)
4393 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4394 ; AVX2-FP-NEXT: vmovaps %ymm0, 512(%r8)
4395 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4396 ; AVX2-FP-NEXT: vmovaps %ymm0, 416(%r8)
4397 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4398 ; AVX2-FP-NEXT: vmovaps %ymm0, 384(%r8)
4399 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4400 ; AVX2-FP-NEXT: vmovaps %ymm0, 288(%r8)
4401 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4402 ; AVX2-FP-NEXT: vmovaps %ymm0, 256(%r8)
4403 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4404 ; AVX2-FP-NEXT: vmovaps %ymm0, 160(%r8)
4405 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4406 ; AVX2-FP-NEXT: vmovaps %ymm0, 128(%r8)
4407 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4408 ; AVX2-FP-NEXT: vmovaps %ymm0, 32(%r8)
4409 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4410 ; AVX2-FP-NEXT: vmovaps %ymm0, (%r8)
4411 ; AVX2-FP-NEXT: addq $520, %rsp # imm = 0x208
4412 ; AVX2-FP-NEXT: vzeroupper
4413 ; AVX2-FP-NEXT: retq
4415 ; AVX2-FCP-LABEL: store_i32_stride4_vf64:
4416 ; AVX2-FCP: # %bb.0:
4417 ; AVX2-FCP-NEXT: subq $520, %rsp # imm = 0x208
4418 ; AVX2-FCP-NEXT: vmovaps (%rcx), %xmm4
4419 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %xmm5
4420 ; AVX2-FCP-NEXT: vmovaps 64(%rcx), %xmm0
4421 ; AVX2-FCP-NEXT: vmovaps (%rdx), %xmm6
4422 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %xmm7
4423 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %xmm1
4424 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
4425 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[0,0,2,1]
4426 ; AVX2-FCP-NEXT: vmovaps (%rsi), %xmm9
4427 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %xmm10
4428 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %xmm2
4429 ; AVX2-FCP-NEXT: vmovaps (%rdi), %xmm11
4430 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %xmm12
4431 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %xmm3
4432 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm13 = xmm11[0],xmm9[0],xmm11[1],xmm9[1]
4433 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,1,1,3]
4434 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm13[0,1],ymm8[2,3],ymm13[4,5],ymm8[6,7]
4435 ; AVX2-FCP-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4436 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4437 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4438 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm6 = xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4439 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
4440 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
4441 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4442 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
4443 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4444 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm6 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4445 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[0,1,1,3]
4446 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
4447 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4448 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm4 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4449 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4450 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm5 = xmm12[2],xmm10[2],xmm12[3],xmm10[3]
4451 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4452 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
4453 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4454 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4455 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[0,0,2,1]
4456 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
4457 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4458 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
4459 ; AVX2-FCP-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4460 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4461 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4462 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4463 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4464 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4465 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4466 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %xmm0
4467 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %xmm1
4468 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4469 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4470 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %xmm3
4471 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %xmm4
4472 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4473 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4474 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4475 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4476 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4477 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4478 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4479 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4480 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4481 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4482 ; AVX2-FCP-NEXT: vmovaps 128(%rcx), %xmm0
4483 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %xmm1
4484 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4485 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4486 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %xmm3
4487 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %xmm4
4488 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4489 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4490 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4491 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4492 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4493 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4494 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4495 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4496 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4497 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4498 ; AVX2-FCP-NEXT: vmovaps 160(%rcx), %xmm0
4499 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %xmm1
4500 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4501 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4502 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %xmm3
4503 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %xmm4
4504 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4505 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4506 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4507 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4508 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4509 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4510 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4511 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4512 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4513 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4514 ; AVX2-FCP-NEXT: vmovaps 192(%rcx), %xmm0
4515 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %xmm1
4516 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4517 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4518 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %xmm3
4519 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %xmm4
4520 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4521 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4522 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4523 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4524 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4525 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4526 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4527 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4528 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4529 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4530 ; AVX2-FCP-NEXT: vmovaps 224(%rcx), %xmm0
4531 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %xmm1
4532 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4533 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
4534 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %xmm3
4535 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %xmm4
4536 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
4537 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[0,1,1,3]
4538 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4539 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4540 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4541 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} xmm1 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4542 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
4543 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,1,1,3]
4544 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4545 ; AVX2-FCP-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
4546 ; AVX2-FCP-NEXT: vmovaps (%rdx), %ymm0
4547 ; AVX2-FCP-NEXT: vmovaps (%rcx), %ymm1
4548 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4549 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4550 ; AVX2-FCP-NEXT: vmovaps (%rdi), %ymm3
4551 ; AVX2-FCP-NEXT: vmovaps (%rsi), %ymm4
4552 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4553 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4554 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4555 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4556 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4557 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4558 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4559 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4560 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4561 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4562 ; AVX2-FCP-NEXT: vmovaps 32(%rdx), %ymm0
4563 ; AVX2-FCP-NEXT: vmovaps 32(%rcx), %ymm1
4564 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4565 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4566 ; AVX2-FCP-NEXT: vmovaps 32(%rdi), %ymm3
4567 ; AVX2-FCP-NEXT: vmovaps 32(%rsi), %ymm4
4568 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4569 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4570 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4571 ; AVX2-FCP-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4572 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4573 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4574 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4575 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4576 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4577 ; AVX2-FCP-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4578 ; AVX2-FCP-NEXT: vmovaps 64(%rdx), %ymm0
4579 ; AVX2-FCP-NEXT: vmovaps 64(%rcx), %ymm1
4580 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4581 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4582 ; AVX2-FCP-NEXT: vmovaps 64(%rdi), %ymm3
4583 ; AVX2-FCP-NEXT: vmovaps 64(%rsi), %ymm4
4584 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4585 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4586 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm11 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4587 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4588 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4589 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4590 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4591 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm10 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4592 ; AVX2-FCP-NEXT: vmovaps 96(%rdx), %ymm0
4593 ; AVX2-FCP-NEXT: vmovaps 96(%rcx), %ymm1
4594 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4595 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4596 ; AVX2-FCP-NEXT: vmovaps 96(%rdi), %ymm3
4597 ; AVX2-FCP-NEXT: vmovaps 96(%rsi), %ymm4
4598 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4599 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[2,1,3,3]
4600 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm9 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4601 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4602 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4603 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,2,3]
4604 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,1,3,3]
4605 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4606 ; AVX2-FCP-NEXT: vmovaps 128(%rdx), %ymm3
4607 ; AVX2-FCP-NEXT: vmovaps 128(%rcx), %ymm1
4608 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm2 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[4],ymm1[4],ymm3[5],ymm1[5]
4609 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm4 = ymm2[0,2,2,3]
4610 ; AVX2-FCP-NEXT: vmovaps 128(%rdi), %ymm2
4611 ; AVX2-FCP-NEXT: vmovaps 128(%rsi), %ymm0
4612 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[4],ymm0[4],ymm2[5],ymm0[5]
4613 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,1,3,3]
4614 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0,1],ymm4[2,3],ymm13[4,5],ymm4[6,7]
4615 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[6],ymm1[6],ymm3[7],ymm1[7]
4616 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[6],ymm0[6],ymm2[7],ymm0[7]
4617 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4618 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4619 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm6 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
4620 ; AVX2-FCP-NEXT: vmovaps 160(%rdx), %ymm2
4621 ; AVX2-FCP-NEXT: vmovaps 160(%rcx), %ymm3
4622 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm1 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4623 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
4624 ; AVX2-FCP-NEXT: vmovaps 160(%rdi), %ymm13
4625 ; AVX2-FCP-NEXT: vmovaps 160(%rsi), %ymm0
4626 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm15 = ymm13[0],ymm0[0],ymm13[1],ymm0[1],ymm13[4],ymm0[4],ymm13[5],ymm0[5]
4627 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,1,3,3]
4628 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm5 = ymm15[0,1],ymm1[2,3],ymm15[4,5],ymm1[6,7]
4629 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4630 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm13[2],ymm0[2],ymm13[3],ymm0[3],ymm13[6],ymm0[6],ymm13[7],ymm0[7]
4631 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4632 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4633 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm4 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4634 ; AVX2-FCP-NEXT: vmovaps 192(%rdx), %ymm2
4635 ; AVX2-FCP-NEXT: vmovaps 192(%rcx), %ymm3
4636 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm13 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4637 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[0,2,2,3]
4638 ; AVX2-FCP-NEXT: vmovaps 192(%rdi), %ymm15
4639 ; AVX2-FCP-NEXT: vmovaps 192(%rsi), %ymm0
4640 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4641 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,1,3,3]
4642 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3],ymm14[4,5],ymm13[6,7]
4643 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4644 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4645 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4646 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4647 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4648 ; AVX2-FCP-NEXT: vmovaps 224(%rdx), %ymm2
4649 ; AVX2-FCP-NEXT: vmovaps 224(%rcx), %ymm3
4650 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm14 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
4651 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[0,2,2,3]
4652 ; AVX2-FCP-NEXT: vmovaps 224(%rdi), %ymm15
4653 ; AVX2-FCP-NEXT: vmovaps 224(%rsi), %ymm0
4654 ; AVX2-FCP-NEXT: vunpcklps {{.*#+}} ymm12 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[4],ymm0[4],ymm15[5],ymm0[5]
4655 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,1,3,3]
4656 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm14[2,3],ymm12[4,5],ymm14[6,7]
4657 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
4658 ; AVX2-FCP-NEXT: vunpckhps {{.*#+}} ymm0 = ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[6],ymm0[6],ymm15[7],ymm0[7]
4659 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
4660 ; AVX2-FCP-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
4661 ; AVX2-FCP-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
4662 ; AVX2-FCP-NEXT: vmovaps %ymm0, 992(%r8)
4663 ; AVX2-FCP-NEXT: vmovaps %ymm12, 960(%r8)
4664 ; AVX2-FCP-NEXT: vmovaps %ymm1, 864(%r8)
4665 ; AVX2-FCP-NEXT: vmovaps %ymm13, 832(%r8)
4666 ; AVX2-FCP-NEXT: vmovaps %ymm4, 736(%r8)
4667 ; AVX2-FCP-NEXT: vmovaps %ymm5, 704(%r8)
4668 ; AVX2-FCP-NEXT: vmovaps %ymm6, 608(%r8)
4669 ; AVX2-FCP-NEXT: vmovaps %ymm7, 576(%r8)
4670 ; AVX2-FCP-NEXT: vmovaps %ymm8, 480(%r8)
4671 ; AVX2-FCP-NEXT: vmovaps %ymm9, 448(%r8)
4672 ; AVX2-FCP-NEXT: vmovaps %ymm10, 352(%r8)
4673 ; AVX2-FCP-NEXT: vmovaps %ymm11, 320(%r8)
4674 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4675 ; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%r8)
4676 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4677 ; AVX2-FCP-NEXT: vmovaps %ymm0, 192(%r8)
4678 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4679 ; AVX2-FCP-NEXT: vmovaps %ymm0, 96(%r8)
4680 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4681 ; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%r8)
4682 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4683 ; AVX2-FCP-NEXT: vmovaps %ymm0, 928(%r8)
4684 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4685 ; AVX2-FCP-NEXT: vmovaps %ymm0, 896(%r8)
4686 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4687 ; AVX2-FCP-NEXT: vmovaps %ymm0, 800(%r8)
4688 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4689 ; AVX2-FCP-NEXT: vmovaps %ymm0, 768(%r8)
4690 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4691 ; AVX2-FCP-NEXT: vmovaps %ymm0, 672(%r8)
4692 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4693 ; AVX2-FCP-NEXT: vmovaps %ymm0, 640(%r8)
4694 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4695 ; AVX2-FCP-NEXT: vmovaps %ymm0, 544(%r8)
4696 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4697 ; AVX2-FCP-NEXT: vmovaps %ymm0, 512(%r8)
4698 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4699 ; AVX2-FCP-NEXT: vmovaps %ymm0, 416(%r8)
4700 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4701 ; AVX2-FCP-NEXT: vmovaps %ymm0, 384(%r8)
4702 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4703 ; AVX2-FCP-NEXT: vmovaps %ymm0, 288(%r8)
4704 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4705 ; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%r8)
4706 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4707 ; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%r8)
4708 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4709 ; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%r8)
4710 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4711 ; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%r8)
4712 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4713 ; AVX2-FCP-NEXT: vmovaps %ymm0, (%r8)
4714 ; AVX2-FCP-NEXT: addq $520, %rsp # imm = 0x208
4715 ; AVX2-FCP-NEXT: vzeroupper
4716 ; AVX2-FCP-NEXT: retq
4718 ; AVX512-LABEL: store_i32_stride4_vf64:
4720 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
4721 ; AVX512-NEXT: vmovdqa64 64(%rdi), %zmm1
4722 ; AVX512-NEXT: vmovdqa64 128(%rdi), %zmm2
4723 ; AVX512-NEXT: vmovdqa64 192(%rdi), %zmm3
4724 ; AVX512-NEXT: vmovdqa64 (%rsi), %zmm17
4725 ; AVX512-NEXT: vmovdqa64 64(%rsi), %zmm23
4726 ; AVX512-NEXT: vmovdqa64 128(%rsi), %zmm12
4727 ; AVX512-NEXT: vmovdqa64 192(%rsi), %zmm5
4728 ; AVX512-NEXT: vmovdqa64 (%rdx), %zmm22
4729 ; AVX512-NEXT: vmovdqa64 64(%rdx), %zmm25
4730 ; AVX512-NEXT: vmovdqa64 128(%rdx), %zmm13
4731 ; AVX512-NEXT: vmovdqa64 192(%rdx), %zmm6
4732 ; AVX512-NEXT: vmovdqa64 (%rcx), %zmm21
4733 ; AVX512-NEXT: vmovdqa64 64(%rcx), %zmm26
4734 ; AVX512-NEXT: vmovdqa64 128(%rcx), %zmm19
4735 ; AVX512-NEXT: vmovdqa64 192(%rcx), %zmm9
4736 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
4737 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm8
4738 ; AVX512-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
4739 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
4740 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm4
4741 ; AVX512-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
4742 ; AVX512-NEXT: movb $-86, %al
4743 ; AVX512-NEXT: kmovw %eax, %k1
4744 ; AVX512-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
4745 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
4746 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm10
4747 ; AVX512-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
4748 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
4749 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm8
4750 ; AVX512-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
4751 ; AVX512-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
4752 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
4753 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm20
4754 ; AVX512-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
4755 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
4756 ; AVX512-NEXT: vmovdqa64 %zmm0, %zmm10
4757 ; AVX512-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
4758 ; AVX512-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
4759 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
4760 ; AVX512-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
4761 ; AVX512-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
4762 ; AVX512-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
4763 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
4764 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm22
4765 ; AVX512-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
4766 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm17
4767 ; AVX512-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
4768 ; AVX512-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
4769 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm24
4770 ; AVX512-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
4771 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm22
4772 ; AVX512-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
4773 ; AVX512-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
4774 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm27
4775 ; AVX512-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
4776 ; AVX512-NEXT: vmovdqa64 %zmm1, %zmm24
4777 ; AVX512-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
4778 ; AVX512-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
4779 ; AVX512-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
4780 ; AVX512-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
4781 ; AVX512-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
4782 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm23
4783 ; AVX512-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
4784 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm25
4785 ; AVX512-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
4786 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4787 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm23
4788 ; AVX512-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
4789 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm26
4790 ; AVX512-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
4791 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
4792 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm23
4793 ; AVX512-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
4794 ; AVX512-NEXT: vmovdqa64 %zmm2, %zmm27
4795 ; AVX512-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
4796 ; AVX512-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
4797 ; AVX512-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
4798 ; AVX512-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
4799 ; AVX512-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
4800 ; AVX512-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
4801 ; AVX512-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
4802 ; AVX512-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
4803 ; AVX512-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
4804 ; AVX512-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
4805 ; AVX512-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
4806 ; AVX512-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
4807 ; AVX512-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
4808 ; AVX512-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
4809 ; AVX512-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
4810 ; AVX512-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
4811 ; AVX512-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
4812 ; AVX512-NEXT: vmovdqa64 %zmm3, 896(%r8)
4813 ; AVX512-NEXT: vmovdqa64 %zmm15, 960(%r8)
4814 ; AVX512-NEXT: vmovdqa64 %zmm11, 768(%r8)
4815 ; AVX512-NEXT: vmovdqa64 %zmm7, 832(%r8)
4816 ; AVX512-NEXT: vmovdqa64 %zmm2, 640(%r8)
4817 ; AVX512-NEXT: vmovdqa64 %zmm27, 704(%r8)
4818 ; AVX512-NEXT: vmovdqa64 %zmm26, 512(%r8)
4819 ; AVX512-NEXT: vmovdqa64 %zmm25, 576(%r8)
4820 ; AVX512-NEXT: vmovdqa64 %zmm1, 384(%r8)
4821 ; AVX512-NEXT: vmovdqa64 %zmm24, 448(%r8)
4822 ; AVX512-NEXT: vmovdqa64 %zmm22, 256(%r8)
4823 ; AVX512-NEXT: vmovdqa64 %zmm17, 320(%r8)
4824 ; AVX512-NEXT: vmovdqa64 %zmm0, 128(%r8)
4825 ; AVX512-NEXT: vmovdqa64 %zmm10, 192(%r8)
4826 ; AVX512-NEXT: vmovdqa64 %zmm8, (%r8)
4827 ; AVX512-NEXT: vmovdqa64 %zmm4, 64(%r8)
4828 ; AVX512-NEXT: vzeroupper
4831 ; AVX512-FCP-LABEL: store_i32_stride4_vf64:
4832 ; AVX512-FCP: # %bb.0:
4833 ; AVX512-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
4834 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
4835 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %zmm2
4836 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdi), %zmm3
4837 ; AVX512-FCP-NEXT: vmovdqa64 (%rsi), %zmm17
4838 ; AVX512-FCP-NEXT: vmovdqa64 64(%rsi), %zmm23
4839 ; AVX512-FCP-NEXT: vmovdqa64 128(%rsi), %zmm12
4840 ; AVX512-FCP-NEXT: vmovdqa64 192(%rsi), %zmm5
4841 ; AVX512-FCP-NEXT: vmovdqa64 (%rdx), %zmm22
4842 ; AVX512-FCP-NEXT: vmovdqa64 64(%rdx), %zmm25
4843 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdx), %zmm13
4844 ; AVX512-FCP-NEXT: vmovdqa64 192(%rdx), %zmm6
4845 ; AVX512-FCP-NEXT: vmovdqa64 (%rcx), %zmm21
4846 ; AVX512-FCP-NEXT: vmovdqa64 64(%rcx), %zmm26
4847 ; AVX512-FCP-NEXT: vmovdqa64 128(%rcx), %zmm19
4848 ; AVX512-FCP-NEXT: vmovdqa64 192(%rcx), %zmm9
4849 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
4850 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm8
4851 ; AVX512-FCP-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
4852 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
4853 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm4
4854 ; AVX512-FCP-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
4855 ; AVX512-FCP-NEXT: movb $-86, %al
4856 ; AVX512-FCP-NEXT: kmovw %eax, %k1
4857 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
4858 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
4859 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm10
4860 ; AVX512-FCP-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
4861 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
4862 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
4863 ; AVX512-FCP-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
4864 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
4865 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
4866 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm20
4867 ; AVX512-FCP-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
4868 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
4869 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, %zmm10
4870 ; AVX512-FCP-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
4871 ; AVX512-FCP-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
4872 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
4873 ; AVX512-FCP-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
4874 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
4875 ; AVX512-FCP-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
4876 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
4877 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm22
4878 ; AVX512-FCP-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
4879 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
4880 ; AVX512-FCP-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
4881 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
4882 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm24
4883 ; AVX512-FCP-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
4884 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm22
4885 ; AVX512-FCP-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
4886 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
4887 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm27
4888 ; AVX512-FCP-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
4889 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, %zmm24
4890 ; AVX512-FCP-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
4891 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
4892 ; AVX512-FCP-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
4893 ; AVX512-FCP-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
4894 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
4895 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
4896 ; AVX512-FCP-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
4897 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm25
4898 ; AVX512-FCP-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
4899 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
4900 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
4901 ; AVX512-FCP-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
4902 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm26
4903 ; AVX512-FCP-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
4904 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
4905 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
4906 ; AVX512-FCP-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
4907 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, %zmm27
4908 ; AVX512-FCP-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
4909 ; AVX512-FCP-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
4910 ; AVX512-FCP-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
4911 ; AVX512-FCP-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
4912 ; AVX512-FCP-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
4913 ; AVX512-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
4914 ; AVX512-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
4915 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
4916 ; AVX512-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
4917 ; AVX512-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
4918 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
4919 ; AVX512-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
4920 ; AVX512-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
4921 ; AVX512-FCP-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
4922 ; AVX512-FCP-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
4923 ; AVX512-FCP-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
4924 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
4925 ; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 896(%r8)
4926 ; AVX512-FCP-NEXT: vmovdqa64 %zmm15, 960(%r8)
4927 ; AVX512-FCP-NEXT: vmovdqa64 %zmm11, 768(%r8)
4928 ; AVX512-FCP-NEXT: vmovdqa64 %zmm7, 832(%r8)
4929 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 640(%r8)
4930 ; AVX512-FCP-NEXT: vmovdqa64 %zmm27, 704(%r8)
4931 ; AVX512-FCP-NEXT: vmovdqa64 %zmm26, 512(%r8)
4932 ; AVX512-FCP-NEXT: vmovdqa64 %zmm25, 576(%r8)
4933 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
4934 ; AVX512-FCP-NEXT: vmovdqa64 %zmm24, 448(%r8)
4935 ; AVX512-FCP-NEXT: vmovdqa64 %zmm22, 256(%r8)
4936 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, 320(%r8)
4937 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
4938 ; AVX512-FCP-NEXT: vmovdqa64 %zmm10, 192(%r8)
4939 ; AVX512-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
4940 ; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 64(%r8)
4941 ; AVX512-FCP-NEXT: vzeroupper
4942 ; AVX512-FCP-NEXT: retq
4944 ; AVX512DQ-LABEL: store_i32_stride4_vf64:
4945 ; AVX512DQ: # %bb.0:
4946 ; AVX512DQ-NEXT: vmovdqa64 (%rdi), %zmm0
4947 ; AVX512DQ-NEXT: vmovdqa64 64(%rdi), %zmm1
4948 ; AVX512DQ-NEXT: vmovdqa64 128(%rdi), %zmm2
4949 ; AVX512DQ-NEXT: vmovdqa64 192(%rdi), %zmm3
4950 ; AVX512DQ-NEXT: vmovdqa64 (%rsi), %zmm17
4951 ; AVX512DQ-NEXT: vmovdqa64 64(%rsi), %zmm23
4952 ; AVX512DQ-NEXT: vmovdqa64 128(%rsi), %zmm12
4953 ; AVX512DQ-NEXT: vmovdqa64 192(%rsi), %zmm5
4954 ; AVX512DQ-NEXT: vmovdqa64 (%rdx), %zmm22
4955 ; AVX512DQ-NEXT: vmovdqa64 64(%rdx), %zmm25
4956 ; AVX512DQ-NEXT: vmovdqa64 128(%rdx), %zmm13
4957 ; AVX512DQ-NEXT: vmovdqa64 192(%rdx), %zmm6
4958 ; AVX512DQ-NEXT: vmovdqa64 (%rcx), %zmm21
4959 ; AVX512DQ-NEXT: vmovdqa64 64(%rcx), %zmm26
4960 ; AVX512DQ-NEXT: vmovdqa64 128(%rcx), %zmm19
4961 ; AVX512DQ-NEXT: vmovdqa64 192(%rcx), %zmm9
4962 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
4963 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm8
4964 ; AVX512DQ-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
4965 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
4966 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm4
4967 ; AVX512DQ-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
4968 ; AVX512DQ-NEXT: movb $-86, %al
4969 ; AVX512DQ-NEXT: kmovw %eax, %k1
4970 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
4971 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
4972 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm10
4973 ; AVX512DQ-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
4974 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
4975 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm8
4976 ; AVX512DQ-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
4977 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
4978 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
4979 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm20
4980 ; AVX512DQ-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
4981 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
4982 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, %zmm10
4983 ; AVX512DQ-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
4984 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
4985 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
4986 ; AVX512DQ-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
4987 ; AVX512DQ-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
4988 ; AVX512DQ-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
4989 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
4990 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm22
4991 ; AVX512DQ-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
4992 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm17
4993 ; AVX512DQ-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
4994 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
4995 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm24
4996 ; AVX512DQ-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
4997 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm22
4998 ; AVX512DQ-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
4999 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5000 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm27
5001 ; AVX512DQ-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5002 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm24
5003 ; AVX512DQ-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5004 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5005 ; AVX512DQ-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5006 ; AVX512DQ-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5007 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5008 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm23
5009 ; AVX512DQ-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5010 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm25
5011 ; AVX512DQ-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5012 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5013 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm23
5014 ; AVX512DQ-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5015 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm26
5016 ; AVX512DQ-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5017 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5018 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm23
5019 ; AVX512DQ-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5020 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, %zmm27
5021 ; AVX512DQ-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5022 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5023 ; AVX512DQ-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5024 ; AVX512DQ-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5025 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5026 ; AVX512DQ-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5027 ; AVX512DQ-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5028 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5029 ; AVX512DQ-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5030 ; AVX512DQ-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5031 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5032 ; AVX512DQ-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5033 ; AVX512DQ-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5034 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5035 ; AVX512DQ-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5036 ; AVX512DQ-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5037 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5038 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 896(%r8)
5039 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 960(%r8)
5040 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 768(%r8)
5041 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 832(%r8)
5042 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 640(%r8)
5043 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 704(%r8)
5044 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, 512(%r8)
5045 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 576(%r8)
5046 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 384(%r8)
5047 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, 448(%r8)
5048 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 256(%r8)
5049 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 320(%r8)
5050 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%r8)
5051 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 192(%r8)
5052 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, (%r8)
5053 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%r8)
5054 ; AVX512DQ-NEXT: vzeroupper
5055 ; AVX512DQ-NEXT: retq
5057 ; AVX512DQ-FCP-LABEL: store_i32_stride4_vf64:
5058 ; AVX512DQ-FCP: # %bb.0:
5059 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
5060 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
5061 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %zmm2
5062 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdi), %zmm3
5063 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rsi), %zmm17
5064 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rsi), %zmm23
5065 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rsi), %zmm12
5066 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rsi), %zmm5
5067 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rdx), %zmm22
5068 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rdx), %zmm25
5069 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdx), %zmm13
5070 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rdx), %zmm6
5071 ; AVX512DQ-FCP-NEXT: vmovdqa64 (%rcx), %zmm21
5072 ; AVX512DQ-FCP-NEXT: vmovdqa64 64(%rcx), %zmm26
5073 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rcx), %zmm19
5074 ; AVX512DQ-FCP-NEXT: vmovdqa64 192(%rcx), %zmm9
5075 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
5076 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm8
5077 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
5078 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
5079 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm4
5080 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
5081 ; AVX512DQ-FCP-NEXT: movb $-86, %al
5082 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
5083 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
5084 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
5085 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm10
5086 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
5087 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
5088 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
5089 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
5090 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
5091 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
5092 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm20
5093 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
5094 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
5095 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, %zmm10
5096 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
5097 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
5098 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
5099 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
5100 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
5101 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
5102 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
5103 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm22
5104 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
5105 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
5106 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
5107 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
5108 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm24
5109 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
5110 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm22
5111 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
5112 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5113 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm27
5114 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5115 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, %zmm24
5116 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5117 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5118 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5119 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5120 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5121 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5122 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5123 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm25
5124 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5125 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5126 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5127 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5128 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm26
5129 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5130 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5131 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5132 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5133 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, %zmm27
5134 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5135 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5136 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5137 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5138 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5139 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5140 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5141 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5142 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5143 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5144 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5145 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5146 ; AVX512DQ-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5147 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5148 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5149 ; AVX512DQ-FCP-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5150 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5151 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 896(%r8)
5152 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm15, 960(%r8)
5153 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm11, 768(%r8)
5154 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm7, 832(%r8)
5155 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 640(%r8)
5156 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm27, 704(%r8)
5157 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm26, 512(%r8)
5158 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm25, 576(%r8)
5159 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
5160 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm24, 448(%r8)
5161 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm22, 256(%r8)
5162 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, 320(%r8)
5163 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
5164 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, 192(%r8)
5165 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
5166 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 64(%r8)
5167 ; AVX512DQ-FCP-NEXT: vzeroupper
5168 ; AVX512DQ-FCP-NEXT: retq
5170 ; AVX512BW-LABEL: store_i32_stride4_vf64:
5171 ; AVX512BW: # %bb.0:
5172 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
5173 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
5174 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm2
5175 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm3
5176 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm17
5177 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm23
5178 ; AVX512BW-NEXT: vmovdqa64 128(%rsi), %zmm12
5179 ; AVX512BW-NEXT: vmovdqa64 192(%rsi), %zmm5
5180 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm22
5181 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm25
5182 ; AVX512BW-NEXT: vmovdqa64 128(%rdx), %zmm13
5183 ; AVX512BW-NEXT: vmovdqa64 192(%rdx), %zmm6
5184 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm21
5185 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm26
5186 ; AVX512BW-NEXT: vmovdqa64 128(%rcx), %zmm19
5187 ; AVX512BW-NEXT: vmovdqa64 192(%rcx), %zmm9
5188 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
5189 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm8
5190 ; AVX512BW-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
5191 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
5192 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4
5193 ; AVX512BW-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
5194 ; AVX512BW-NEXT: movb $-86, %al
5195 ; AVX512BW-NEXT: kmovd %eax, %k1
5196 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
5197 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
5198 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm10
5199 ; AVX512BW-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
5200 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
5201 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8
5202 ; AVX512BW-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
5203 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
5204 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
5205 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm20
5206 ; AVX512BW-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
5207 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
5208 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10
5209 ; AVX512BW-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
5210 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
5211 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
5212 ; AVX512BW-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
5213 ; AVX512BW-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
5214 ; AVX512BW-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
5215 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
5216 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm22
5217 ; AVX512BW-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
5218 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm17
5219 ; AVX512BW-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
5220 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
5221 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm24
5222 ; AVX512BW-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
5223 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm22
5224 ; AVX512BW-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
5225 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5226 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm27
5227 ; AVX512BW-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5228 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm24
5229 ; AVX512BW-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5230 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5231 ; AVX512BW-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5232 ; AVX512BW-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5233 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5234 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm23
5235 ; AVX512BW-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5236 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm25
5237 ; AVX512BW-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5238 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5239 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm23
5240 ; AVX512BW-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5241 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm26
5242 ; AVX512BW-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5243 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5244 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm23
5245 ; AVX512BW-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5246 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm27
5247 ; AVX512BW-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5248 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5249 ; AVX512BW-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5250 ; AVX512BW-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5251 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5252 ; AVX512BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5253 ; AVX512BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5254 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5255 ; AVX512BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5256 ; AVX512BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5257 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5258 ; AVX512BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5259 ; AVX512BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5260 ; AVX512BW-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5261 ; AVX512BW-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5262 ; AVX512BW-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5263 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5264 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 896(%r8)
5265 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 960(%r8)
5266 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 768(%r8)
5267 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 832(%r8)
5268 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 640(%r8)
5269 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 704(%r8)
5270 ; AVX512BW-NEXT: vmovdqa64 %zmm26, 512(%r8)
5271 ; AVX512BW-NEXT: vmovdqa64 %zmm25, 576(%r8)
5272 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 384(%r8)
5273 ; AVX512BW-NEXT: vmovdqa64 %zmm24, 448(%r8)
5274 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 256(%r8)
5275 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 320(%r8)
5276 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%r8)
5277 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 192(%r8)
5278 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%r8)
5279 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 64(%r8)
5280 ; AVX512BW-NEXT: vzeroupper
5281 ; AVX512BW-NEXT: retq
5283 ; AVX512BW-FCP-LABEL: store_i32_stride4_vf64:
5284 ; AVX512BW-FCP: # %bb.0:
5285 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
5286 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
5287 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm2
5288 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm3
5289 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm17
5290 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm23
5291 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm12
5292 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm5
5293 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm22
5294 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm25
5295 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm13
5296 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm6
5297 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm21
5298 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm26
5299 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm19
5300 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm9
5301 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
5302 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm8
5303 ; AVX512BW-FCP-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
5304 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
5305 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm4
5306 ; AVX512BW-FCP-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
5307 ; AVX512BW-FCP-NEXT: movb $-86, %al
5308 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
5309 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
5310 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
5311 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm10
5312 ; AVX512BW-FCP-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
5313 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
5314 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
5315 ; AVX512BW-FCP-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
5316 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
5317 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
5318 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20
5319 ; AVX512BW-FCP-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
5320 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
5321 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10
5322 ; AVX512BW-FCP-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
5323 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
5324 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
5325 ; AVX512BW-FCP-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
5326 ; AVX512BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
5327 ; AVX512BW-FCP-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
5328 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
5329 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm22
5330 ; AVX512BW-FCP-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
5331 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
5332 ; AVX512BW-FCP-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
5333 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
5334 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm24
5335 ; AVX512BW-FCP-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
5336 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm22
5337 ; AVX512BW-FCP-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
5338 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5339 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm27
5340 ; AVX512BW-FCP-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5341 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm24
5342 ; AVX512BW-FCP-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5343 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5344 ; AVX512BW-FCP-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5345 ; AVX512BW-FCP-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5346 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5347 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5348 ; AVX512BW-FCP-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5349 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm25
5350 ; AVX512BW-FCP-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5351 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5352 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5353 ; AVX512BW-FCP-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5354 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm26
5355 ; AVX512BW-FCP-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5356 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5357 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5358 ; AVX512BW-FCP-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5359 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm27
5360 ; AVX512BW-FCP-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5361 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5362 ; AVX512BW-FCP-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5363 ; AVX512BW-FCP-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5364 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5365 ; AVX512BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5366 ; AVX512BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5367 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5368 ; AVX512BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5369 ; AVX512BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5370 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5371 ; AVX512BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5372 ; AVX512BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5373 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5374 ; AVX512BW-FCP-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5375 ; AVX512BW-FCP-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5376 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5377 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 896(%r8)
5378 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, 960(%r8)
5379 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 768(%r8)
5380 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, 832(%r8)
5381 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 640(%r8)
5382 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm27, 704(%r8)
5383 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm26, 512(%r8)
5384 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm25, 576(%r8)
5385 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
5386 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm24, 448(%r8)
5387 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm22, 256(%r8)
5388 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, 320(%r8)
5389 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
5390 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 192(%r8)
5391 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
5392 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 64(%r8)
5393 ; AVX512BW-FCP-NEXT: vzeroupper
5394 ; AVX512BW-FCP-NEXT: retq
5396 ; AVX512DQ-BW-LABEL: store_i32_stride4_vf64:
5397 ; AVX512DQ-BW: # %bb.0:
5398 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
5399 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
5400 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm2
5401 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm3
5402 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %zmm17
5403 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rsi), %zmm23
5404 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rsi), %zmm12
5405 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rsi), %zmm5
5406 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %zmm22
5407 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdx), %zmm25
5408 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdx), %zmm13
5409 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdx), %zmm6
5410 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %zmm21
5411 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rcx), %zmm26
5412 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rcx), %zmm19
5413 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rcx), %zmm9
5414 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
5415 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm8
5416 ; AVX512DQ-BW-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
5417 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
5418 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm4
5419 ; AVX512DQ-BW-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
5420 ; AVX512DQ-BW-NEXT: movb $-86, %al
5421 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
5422 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
5423 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
5424 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm10
5425 ; AVX512DQ-BW-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
5426 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
5427 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm8
5428 ; AVX512DQ-BW-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
5429 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
5430 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
5431 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm20
5432 ; AVX512DQ-BW-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
5433 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
5434 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, %zmm10
5435 ; AVX512DQ-BW-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
5436 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
5437 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
5438 ; AVX512DQ-BW-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
5439 ; AVX512DQ-BW-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
5440 ; AVX512DQ-BW-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
5441 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
5442 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm22
5443 ; AVX512DQ-BW-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
5444 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm17
5445 ; AVX512DQ-BW-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
5446 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
5447 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm24
5448 ; AVX512DQ-BW-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
5449 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm22
5450 ; AVX512DQ-BW-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
5451 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5452 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm27
5453 ; AVX512DQ-BW-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5454 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm24
5455 ; AVX512DQ-BW-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5456 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5457 ; AVX512DQ-BW-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5458 ; AVX512DQ-BW-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5459 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5460 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm23
5461 ; AVX512DQ-BW-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5462 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm25
5463 ; AVX512DQ-BW-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5464 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5465 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm23
5466 ; AVX512DQ-BW-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5467 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm26
5468 ; AVX512DQ-BW-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5469 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5470 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm23
5471 ; AVX512DQ-BW-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5472 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, %zmm27
5473 ; AVX512DQ-BW-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5474 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5475 ; AVX512DQ-BW-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5476 ; AVX512DQ-BW-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5477 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5478 ; AVX512DQ-BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5479 ; AVX512DQ-BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5480 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5481 ; AVX512DQ-BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5482 ; AVX512DQ-BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5483 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5484 ; AVX512DQ-BW-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5485 ; AVX512DQ-BW-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5486 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5487 ; AVX512DQ-BW-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5488 ; AVX512DQ-BW-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5489 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5490 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, 896(%r8)
5491 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, 960(%r8)
5492 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, 768(%r8)
5493 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 832(%r8)
5494 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, 640(%r8)
5495 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm27, 704(%r8)
5496 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm26, 512(%r8)
5497 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm25, 576(%r8)
5498 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, 384(%r8)
5499 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm24, 448(%r8)
5500 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm22, 256(%r8)
5501 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm17, 320(%r8)
5502 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 128(%r8)
5503 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, 192(%r8)
5504 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, (%r8)
5505 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, 64(%r8)
5506 ; AVX512DQ-BW-NEXT: vzeroupper
5507 ; AVX512DQ-BW-NEXT: retq
5509 ; AVX512DQ-BW-FCP-LABEL: store_i32_stride4_vf64:
5510 ; AVX512DQ-BW-FCP: # %bb.0:
5511 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
5512 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
5513 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm2
5514 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm3
5515 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %zmm17
5516 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rsi), %zmm23
5517 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rsi), %zmm12
5518 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rsi), %zmm5
5519 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %zmm22
5520 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdx), %zmm25
5521 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdx), %zmm13
5522 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdx), %zmm6
5523 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %zmm21
5524 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rcx), %zmm26
5525 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rcx), %zmm19
5526 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rcx), %zmm9
5527 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm14 = [0,0,4,20,0,0,5,21,0,0,6,22,0,0,7,23]
5528 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm8
5529 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm21, %zmm14, %zmm8
5530 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm7 = [4,20,0,0,5,21,0,0,6,22,0,0,7,23,0,0]
5531 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm4
5532 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm17, %zmm7, %zmm4
5533 ; AVX512DQ-BW-FCP-NEXT: movb $-86, %al
5534 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
5535 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, %zmm4 {%k1}
5536 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm16 = [0,0,0,16,0,0,1,17,0,0,2,18,0,0,3,19]
5537 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm10
5538 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm21, %zmm16, %zmm10
5539 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm11 = [0,16,0,0,1,17,0,0,2,18,0,0,3,19,0,0]
5540 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm8
5541 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm17, %zmm11, %zmm8
5542 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm8 {%k1}
5543 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm18 = [0,0,12,28,0,0,13,29,0,0,14,30,0,0,15,31]
5544 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm20
5545 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm21, %zmm18, %zmm20
5546 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm15 = [12,28,0,0,13,29,0,0,14,30,0,0,15,31,0,0]
5547 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, %zmm10
5548 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm17, %zmm15, %zmm10
5549 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm20, %zmm10 {%k1}
5550 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm20 = [0,0,8,24,0,0,9,25,0,0,10,26,0,0,11,27]
5551 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm21, %zmm20, %zmm22
5552 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbd {{.*#+}} zmm21 = [8,24,0,0,9,25,0,0,10,26,0,0,11,27,0,0]
5553 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm17, %zmm21, %zmm0
5554 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm0 {%k1}
5555 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm22
5556 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm26, %zmm14, %zmm22
5557 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
5558 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm23, %zmm7, %zmm17
5559 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, %zmm17 {%k1}
5560 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm24
5561 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm26, %zmm16, %zmm24
5562 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm22
5563 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm23, %zmm11, %zmm22
5564 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, %zmm22 {%k1}
5565 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm27
5566 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm26, %zmm18, %zmm27
5567 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm24
5568 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm23, %zmm15, %zmm24
5569 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, %zmm24 {%k1}
5570 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm26, %zmm20, %zmm25
5571 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm23, %zmm21, %zmm1
5572 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, %zmm1 {%k1}
5573 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5574 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm19, %zmm14, %zmm23
5575 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm25
5576 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm12, %zmm7, %zmm25
5577 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm25 {%k1}
5578 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5579 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm19, %zmm16, %zmm23
5580 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm26
5581 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm12, %zmm11, %zmm26
5582 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm26 {%k1}
5583 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm23
5584 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm19, %zmm18, %zmm23
5585 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, %zmm27
5586 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm12, %zmm15, %zmm27
5587 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm23, %zmm27 {%k1}
5588 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm19, %zmm20, %zmm13
5589 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm12, %zmm21, %zmm2
5590 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, %zmm2 {%k1}
5591 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm14
5592 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm7
5593 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, %zmm7 {%k1}
5594 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm16
5595 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm11
5596 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm16, %zmm11 {%k1}
5597 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm9, %zmm6, %zmm18
5598 ; AVX512DQ-BW-FCP-NEXT: vpermi2d %zmm5, %zmm3, %zmm15
5599 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, %zmm15 {%k1}
5600 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm9, %zmm20, %zmm6
5601 ; AVX512DQ-BW-FCP-NEXT: vpermt2d %zmm5, %zmm21, %zmm3
5602 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
5603 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 896(%r8)
5604 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, 960(%r8)
5605 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 768(%r8)
5606 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, 832(%r8)
5607 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 640(%r8)
5608 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm27, 704(%r8)
5609 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm26, 512(%r8)
5610 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm25, 576(%r8)
5611 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, 384(%r8)
5612 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm24, 448(%r8)
5613 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm22, 256(%r8)
5614 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, 320(%r8)
5615 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 128(%r8)
5616 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 192(%r8)
5617 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
5618 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 64(%r8)
5619 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
5620 ; AVX512DQ-BW-FCP-NEXT: retq
5621 %in.vec0 = load <64 x i32>, ptr %in.vecptr0, align 64
5622 %in.vec1 = load <64 x i32>, ptr %in.vecptr1, align 64
5623 %in.vec2 = load <64 x i32>, ptr %in.vecptr2, align 64
5624 %in.vec3 = load <64 x i32>, ptr %in.vecptr3, align 64
5625 %1 = shufflevector <64 x i32> %in.vec0, <64 x i32> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
5626 %2 = shufflevector <64 x i32> %in.vec2, <64 x i32> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
5627 %3 = shufflevector <128 x i32> %1, <128 x i32> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
5628 %interleaved.vec = shufflevector <256 x i32> %3, <256 x i32> poison, <256 x i32> <i32 0, i32 64, i32 128, i32 192, i32 1, i32 65, i32 129, i32 193, i32 2, i32 66, i32 130, i32 194, i32 3, i32 67, i32 131, i32 195, i32 4, i32 68, i32 132, i32 196, i32 5, i32 69, i32 133, i32 197, i32 6, i32 70, i32 134, i32 198, i32 7, i32 71, i32 135, i32 199, i32 8, i32 72, i32 136, i32 200, i32 9, i32 73, i32 137, i32 201, i32 10, i32 74, i32 138, i32 202, i32 11, i32 75, i32 139, i32 203, i32 12, i32 76, i32 140, i32 204, i32 13, i32 77, i32 141, i32 205, i32 14, i32 78, i32 142, i32 206, i32 15, i32 79, i32 143, i32 207, i32 16, i32 80, i32 144, i32 208, i32 17, i32 81, i32 145, i32 209, i32 18, i32 82, i32 146, i32 210, i32 19, i32 83, i32 147, i32 211, i32 20, i32 84, i32 148, i32 212, i32 21, i32 85, i32 149, i32 213, i32 22, i32 86, i32 150, i32 214, i32 23, i32 87, i32 151, i32 215, i32 24, i32 88, i32 152, i32 216, i32 25, i32 89, i32 153, i32 217, i32 26, i32 90, i32 154, i32 218, i32 27, i32 91, i32 155, i32 219, i32 28, i32 92, i32 156, i32 220, i32 29, i32 93, i32 157, i32 221, i32 30, i32 94, i32 158, i32 222, i32 31, i32 95, i32 159, i32 223, i32 32, i32 96, i32 160, i32 224, i32 33, i32 97, i32 161, i32 225, i32 34, i32 98, i32 162, i32 226, i32 35, i32 99, i32 163, i32 227, i32 36, i32 100, i32 164, i32 228, i32 37, i32 101, i32 165, i32 229, i32 38, i32 102, i32 166, i32 230, i32 39, i32 103, i32 167, i32 231, i32 40, i32 104, i32 168, i32 232, i32 41, i32 105, i32 169, i32 233, i32 42, i32 106, i32 170, i32 234, i32 43, i32 107, i32 171, i32 235, i32 44, i32 108, i32 172, i32 236, i32 45, i32 109, i32 173, i32 237, i32 46, i32 110, i32 174, i32 238, i32 47, i32 111, i32 175, i32 239, i32 48, i32 112, i32 176, i32 240, i32 49, i32 113, i32 177, i32 241, i32 50, i32 114, i32 178, i32 242, i32 51, i32 115, i32 179, i32 243, i32 52, i32 116, i32 180, i32 244, i32 53, i32 117, i32 181, i32 245, i32 54, i32 118, i32 182, i32 246, i32 55, i32 119, i32 183, i32 247, i32 56, i32 120, i32 184, i32 248, i32 57, i32 121, i32 185, i32 249, i32 58, i32 122, i32 186, i32 250, i32 59, i32 123, i32 187, i32 251, i32 60, i32 124, i32 188, i32 252, i32 61, i32 125, i32 189, i32 253, i32 62, i32 126, i32 190, i32 254, i32 63, i32 127, i32 191, i32 255>
5629 store <256 x i32> %interleaved.vec, ptr %out.vec, align 64