1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by LoopVectorizer for interleaved stores.
18 define void @store_i32_stride8_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i32_stride8_vf2:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
24 ; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
25 ; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
26 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
27 ; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
28 ; SSE-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
29 ; SSE-NEXT: unpcklps {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
30 ; SSE-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
31 ; SSE-NEXT: movsd {{.*#+}} xmm3 = mem[0],zero
32 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
33 ; SSE-NEXT: movsd {{.*#+}} xmm3 = mem[0],zero
34 ; SSE-NEXT: movsd {{.*#+}} xmm4 = mem[0],zero
35 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
36 ; SSE-NEXT: movaps %xmm0, %xmm4
37 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm1[0]
38 ; SSE-NEXT: movaps %xmm2, %xmm5
39 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm3[0]
40 ; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm0[1],xmm1[1]
41 ; SSE-NEXT: movhlps {{.*#+}} xmm3 = xmm2[1],xmm3[1]
42 ; SSE-NEXT: movaps %xmm3, 48(%rax)
43 ; SSE-NEXT: movaps %xmm1, 32(%rax)
44 ; SSE-NEXT: movaps %xmm5, 16(%rax)
45 ; SSE-NEXT: movaps %xmm4, (%rax)
48 ; AVX1-ONLY-LABEL: store_i32_stride8_vf2:
50 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
51 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
52 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
53 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
54 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
55 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
56 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
57 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
58 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
59 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
60 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
61 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
62 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
63 ; AVX1-ONLY-NEXT: vmovsd {{.*#+}} xmm4 = mem[0],zero
64 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm3[0]
65 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
66 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
67 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm0[0,2],ymm1[0,2],ymm0[4,6],ymm1[4,6]
68 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[1,3],ymm1[1,3],ymm0[5,7],ymm1[5,7]
69 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
70 ; AVX1-ONLY-NEXT: vmovaps %ymm2, (%rax)
71 ; AVX1-ONLY-NEXT: vzeroupper
72 ; AVX1-ONLY-NEXT: retq
74 ; AVX2-ONLY-LABEL: store_i32_stride8_vf2:
76 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
77 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
78 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
79 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
80 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
81 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
82 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
83 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
84 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
85 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
86 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
87 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
88 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
89 ; AVX2-ONLY-NEXT: vmovsd {{.*#+}} xmm4 = mem[0],zero
90 ; AVX2-ONLY-NEXT: vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm3[0]
91 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
92 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm1
93 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm0[0,2],ymm1[0,2],ymm0[4,6],ymm1[4,6]
94 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,1,3]
95 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[1,3],ymm1[1,3],ymm0[5,7],ymm1[5,7]
96 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,1,3]
97 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
98 ; AVX2-ONLY-NEXT: vmovaps %ymm2, (%rax)
99 ; AVX2-ONLY-NEXT: vzeroupper
100 ; AVX2-ONLY-NEXT: retq
102 ; AVX512-LABEL: store_i32_stride8_vf2:
104 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
105 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
106 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
107 ; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
108 ; AVX512-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
109 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
110 ; AVX512-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
111 ; AVX512-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
112 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
113 ; AVX512-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
114 ; AVX512-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
115 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
116 ; AVX512-NEXT: vmovsd {{.*#+}} xmm3 = mem[0],zero
117 ; AVX512-NEXT: vmovsd {{.*#+}} xmm4 = mem[0],zero
118 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm3[0]
119 ; AVX512-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
120 ; AVX512-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
121 ; AVX512-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
122 ; AVX512-NEXT: vmovaps {{.*#+}} zmm1 = [0,2,4,6,8,10,12,14,1,3,5,7,9,11,13,15]
123 ; AVX512-NEXT: vpermps %zmm0, %zmm1, %zmm0
124 ; AVX512-NEXT: vmovaps %zmm0, (%rax)
125 ; AVX512-NEXT: vzeroupper
127 %in.vec0 = load <2 x i32>, ptr %in.vecptr0, align 64
128 %in.vec1 = load <2 x i32>, ptr %in.vecptr1, align 64
129 %in.vec2 = load <2 x i32>, ptr %in.vecptr2, align 64
130 %in.vec3 = load <2 x i32>, ptr %in.vecptr3, align 64
131 %in.vec4 = load <2 x i32>, ptr %in.vecptr4, align 64
132 %in.vec5 = load <2 x i32>, ptr %in.vecptr5, align 64
133 %in.vec6 = load <2 x i32>, ptr %in.vecptr6, align 64
134 %in.vec7 = load <2 x i32>, ptr %in.vecptr7, align 64
135 %1 = shufflevector <2 x i32> %in.vec0, <2 x i32> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
136 %2 = shufflevector <2 x i32> %in.vec2, <2 x i32> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
137 %3 = shufflevector <2 x i32> %in.vec4, <2 x i32> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
138 %4 = shufflevector <2 x i32> %in.vec6, <2 x i32> %in.vec7, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
139 %5 = shufflevector <4 x i32> %1, <4 x i32> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
140 %6 = shufflevector <4 x i32> %3, <4 x i32> %4, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
141 %7 = shufflevector <8 x i32> %5, <8 x i32> %6, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
142 %interleaved.vec = shufflevector <16 x i32> %7, <16 x i32> poison, <16 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 14, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13, i32 15>
143 store <16 x i32> %interleaved.vec, ptr %out.vec, align 64
147 define void @store_i32_stride8_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
148 ; SSE-LABEL: store_i32_stride8_vf4:
150 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
151 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
152 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
153 ; SSE-NEXT: movaps (%rdi), %xmm0
154 ; SSE-NEXT: movaps (%rsi), %xmm5
155 ; SSE-NEXT: movaps (%rdx), %xmm1
156 ; SSE-NEXT: movaps (%rcx), %xmm6
157 ; SSE-NEXT: movaps (%r8), %xmm2
158 ; SSE-NEXT: movaps (%r9), %xmm7
159 ; SSE-NEXT: movaps (%r11), %xmm8
160 ; SSE-NEXT: movaps (%r10), %xmm9
161 ; SSE-NEXT: movaps %xmm1, %xmm10
162 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
163 ; SSE-NEXT: movaps %xmm0, %xmm4
164 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
165 ; SSE-NEXT: movaps %xmm4, %xmm3
166 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm10[0]
167 ; SSE-NEXT: movaps %xmm8, %xmm11
168 ; SSE-NEXT: unpcklps {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1]
169 ; SSE-NEXT: movaps %xmm2, %xmm12
170 ; SSE-NEXT: unpcklps {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1]
171 ; SSE-NEXT: movaps %xmm12, %xmm13
172 ; SSE-NEXT: movlhps {{.*#+}} xmm13 = xmm13[0],xmm11[0]
173 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1],xmm11[1]
174 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm10[1]
175 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
176 ; SSE-NEXT: unpckhps {{.*#+}} xmm0 = xmm0[2],xmm5[2],xmm0[3],xmm5[3]
177 ; SSE-NEXT: movaps %xmm0, %xmm5
178 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm1[0]
179 ; SSE-NEXT: unpckhps {{.*#+}} xmm8 = xmm8[2],xmm9[2],xmm8[3],xmm9[3]
180 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm7[2],xmm2[3],xmm7[3]
181 ; SSE-NEXT: movaps %xmm2, %xmm6
182 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm8[0]
183 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm8[1]
184 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
185 ; SSE-NEXT: movaps %xmm0, 96(%rax)
186 ; SSE-NEXT: movaps %xmm2, 112(%rax)
187 ; SSE-NEXT: movaps %xmm6, 80(%rax)
188 ; SSE-NEXT: movaps %xmm5, 64(%rax)
189 ; SSE-NEXT: movaps %xmm4, 32(%rax)
190 ; SSE-NEXT: movaps %xmm12, 48(%rax)
191 ; SSE-NEXT: movaps %xmm13, 16(%rax)
192 ; SSE-NEXT: movaps %xmm3, (%rax)
195 ; AVX1-ONLY-LABEL: store_i32_stride8_vf4:
196 ; AVX1-ONLY: # %bb.0:
197 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
198 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
199 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
200 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm0
201 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm1
202 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm2
203 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm3
204 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm4
205 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm5
206 ; AVX1-ONLY-NEXT: vmovaps (%r11), %xmm6
207 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm7
208 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm8
209 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm6, %ymm9
210 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
211 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm7 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
212 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
213 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm4[0],ymm8[0],ymm4[1],ymm8[1],ymm4[4],ymm8[4],ymm4[5],ymm8[5]
214 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,1],ymm7[2,0],ymm5[4,5],ymm7[6,4]
215 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm7 = xmm3[0],xmm2[0]
216 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
217 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[0,1],xmm7[2,0]
218 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
219 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
220 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm8[1,0],ymm4[1,0],ymm8[5,4],ymm4[5,4]
221 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm10[2,0],ymm7[2,3],ymm10[6,4],ymm7[6,7]
222 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm10 = xmm0[1],xmm1[1],zero,zero
223 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
224 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0,1],xmm11[2,3]
225 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3],ymm7[4,5,6,7]
226 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
227 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm4[2],ymm8[2],ymm4[3],ymm8[3],ymm4[6],ymm8[6],ymm4[7],ymm8[7]
228 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm11[0,1],ymm10[2,0],ymm11[4,5],ymm10[6,4]
229 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm11 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
230 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm12 = zero,zero,xmm2[2],xmm3[2]
231 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm11 = xmm11[0,1],xmm12[2,3]
232 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm11[0,1,2,3],ymm10[4,5,6,7]
233 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
234 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm8[3,0],ymm4[3,0],ymm8[7,4],ymm4[7,4]
235 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,0],ymm6[2,3],ymm4[6,4],ymm6[6,7]
236 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
237 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm1[3,0],xmm0[3,0]
238 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[2,3]
239 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
240 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
241 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 64(%rax)
242 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 32(%rax)
243 ; AVX1-ONLY-NEXT: vmovaps %ymm5, (%rax)
244 ; AVX1-ONLY-NEXT: vzeroupper
245 ; AVX1-ONLY-NEXT: retq
247 ; AVX2-ONLY-LABEL: store_i32_stride8_vf4:
248 ; AVX2-ONLY: # %bb.0:
249 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
250 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
251 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
252 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm3
253 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm4
254 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm0
255 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm1
256 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm5
257 ; AVX2-ONLY-NEXT: vmovaps (%r11), %xmm6
258 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm2
259 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm8
260 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%r9), %ymm5, %ymm5
261 ; AVX2-ONLY-NEXT: vinsertf128 $1, (%r10), %ymm6, %ymm7
262 ; AVX2-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm6 = [0,4,0,4,0,4,0,4]
263 ; AVX2-ONLY-NEXT: vpermps %ymm7, %ymm6, %ymm9
264 ; AVX2-ONLY-NEXT: vpermps %ymm5, %ymm6, %ymm6
265 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm9[6,7]
266 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm9 = [0,4,0,4]
267 ; AVX2-ONLY-NEXT: # xmm9 = mem[0,0]
268 ; AVX2-ONLY-NEXT: vpermps %ymm8, %ymm9, %ymm9
269 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
270 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3]
271 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
272 ; AVX2-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm9 = [1,5,1,5,1,5,1,5]
273 ; AVX2-ONLY-NEXT: vpermps %ymm7, %ymm9, %ymm10
274 ; AVX2-ONLY-NEXT: vpermps %ymm5, %ymm9, %ymm9
275 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm10[6,7]
276 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm10 = [1,5,1,5]
277 ; AVX2-ONLY-NEXT: # xmm10 = mem[0,0]
278 ; AVX2-ONLY-NEXT: vpermps %ymm2, %ymm10, %ymm10
279 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
280 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0,1],xmm11[2,3]
281 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1,2,3],ymm9[4,5,6,7]
282 ; AVX2-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm10 = [2,6,2,6,2,6,2,6]
283 ; AVX2-ONLY-NEXT: vpermps %ymm7, %ymm10, %ymm11
284 ; AVX2-ONLY-NEXT: vpermps %ymm5, %ymm10, %ymm10
285 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm11[6,7]
286 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm11 = [2,6,2,6]
287 ; AVX2-ONLY-NEXT: # xmm11 = mem[0,0]
288 ; AVX2-ONLY-NEXT: vpermps %ymm8, %ymm11, %ymm8
289 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm3 = xmm3[2],xmm4[2],xmm3[3],xmm4[3]
290 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm8[2,3]
291 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm10[4,5,6,7]
292 ; AVX2-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm4 = [3,7,3,7,3,7,3,7]
293 ; AVX2-ONLY-NEXT: vpermps %ymm7, %ymm4, %ymm7
294 ; AVX2-ONLY-NEXT: vpermps %ymm5, %ymm4, %ymm4
295 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm7[6,7]
296 ; AVX2-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = [3,7,3,7]
297 ; AVX2-ONLY-NEXT: # xmm5 = mem[0,0]
298 ; AVX2-ONLY-NEXT: vpermps %ymm2, %ymm5, %ymm2
299 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
300 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
301 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
302 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
303 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 64(%rax)
304 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 32(%rax)
305 ; AVX2-ONLY-NEXT: vmovaps %ymm6, (%rax)
306 ; AVX2-ONLY-NEXT: vzeroupper
307 ; AVX2-ONLY-NEXT: retq
309 ; AVX512-LABEL: store_i32_stride8_vf4:
311 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
312 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
313 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
314 ; AVX512-NEXT: vmovdqa (%rdi), %xmm0
315 ; AVX512-NEXT: vmovdqa (%rdx), %xmm1
316 ; AVX512-NEXT: vmovdqa (%r8), %xmm2
317 ; AVX512-NEXT: vmovdqa (%r11), %xmm3
318 ; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
319 ; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
320 ; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
321 ; AVX512-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm1
322 ; AVX512-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
323 ; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
324 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,4,8,12,16,20,24,28,1,5,9,13,17,21,25,29]
325 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
326 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm3 = [2,6,10,14,18,22,26,30,3,7,11,15,19,23,27,31]
327 ; AVX512-NEXT: vpermi2d %zmm1, %zmm0, %zmm3
328 ; AVX512-NEXT: vmovdqa64 %zmm3, 64(%rax)
329 ; AVX512-NEXT: vmovdqa64 %zmm2, (%rax)
330 ; AVX512-NEXT: vzeroupper
332 %in.vec0 = load <4 x i32>, ptr %in.vecptr0, align 64
333 %in.vec1 = load <4 x i32>, ptr %in.vecptr1, align 64
334 %in.vec2 = load <4 x i32>, ptr %in.vecptr2, align 64
335 %in.vec3 = load <4 x i32>, ptr %in.vecptr3, align 64
336 %in.vec4 = load <4 x i32>, ptr %in.vecptr4, align 64
337 %in.vec5 = load <4 x i32>, ptr %in.vecptr5, align 64
338 %in.vec6 = load <4 x i32>, ptr %in.vecptr6, align 64
339 %in.vec7 = load <4 x i32>, ptr %in.vecptr7, align 64
340 %1 = shufflevector <4 x i32> %in.vec0, <4 x i32> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
341 %2 = shufflevector <4 x i32> %in.vec2, <4 x i32> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
342 %3 = shufflevector <4 x i32> %in.vec4, <4 x i32> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
343 %4 = shufflevector <4 x i32> %in.vec6, <4 x i32> %in.vec7, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
344 %5 = shufflevector <8 x i32> %1, <8 x i32> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
345 %6 = shufflevector <8 x i32> %3, <8 x i32> %4, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
346 %7 = shufflevector <16 x i32> %5, <16 x i32> %6, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
347 %interleaved.vec = shufflevector <32 x i32> %7, <32 x i32> poison, <32 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 28, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 29, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 30, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27, i32 31>
348 store <32 x i32> %interleaved.vec, ptr %out.vec, align 64
352 define void @store_i32_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
353 ; SSE-LABEL: store_i32_stride8_vf8:
355 ; SSE-NEXT: subq $72, %rsp
356 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
357 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
358 ; SSE-NEXT: movaps (%rdi), %xmm8
359 ; SSE-NEXT: movaps (%rsi), %xmm0
360 ; SSE-NEXT: movaps (%rdx), %xmm9
361 ; SSE-NEXT: movaps (%rcx), %xmm3
362 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
363 ; SSE-NEXT: movaps (%r8), %xmm15
364 ; SSE-NEXT: movaps 16(%r8), %xmm10
365 ; SSE-NEXT: movaps (%r9), %xmm1
366 ; SSE-NEXT: movaps (%r10), %xmm14
367 ; SSE-NEXT: movaps 16(%r10), %xmm12
368 ; SSE-NEXT: movaps (%rax), %xmm4
369 ; SSE-NEXT: movaps 16(%rax), %xmm7
370 ; SSE-NEXT: movaps %xmm4, %xmm2
371 ; SSE-NEXT: movaps %xmm4, %xmm11
372 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm14[0]
373 ; SSE-NEXT: movaps %xmm15, %xmm4
374 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
375 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
376 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,0]
377 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
378 ; SSE-NEXT: movaps %xmm3, %xmm2
379 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm9[0]
380 ; SSE-NEXT: movaps %xmm8, %xmm4
381 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
382 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
383 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,0]
384 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
385 ; SSE-NEXT: movaps %xmm14, %xmm2
386 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm11[1]
387 ; SSE-NEXT: movaps %xmm11, %xmm4
388 ; SSE-NEXT: unpckhps {{.*#+}} xmm15 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
389 ; SSE-NEXT: movaps %xmm15, %xmm1
390 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm2[0,2]
391 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
392 ; SSE-NEXT: movaps %xmm9, %xmm1
393 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm3[1]
394 ; SSE-NEXT: unpckhps {{.*#+}} xmm8 = xmm8[2],xmm0[2],xmm8[3],xmm0[3]
395 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
396 ; SSE-NEXT: movaps %xmm8, %xmm0
397 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm1[0,2]
398 ; SSE-NEXT: movaps %xmm0, (%rsp) # 16-byte Spill
399 ; SSE-NEXT: movaps %xmm7, %xmm0
400 ; SSE-NEXT: movaps %xmm7, %xmm3
401 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
402 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm12[0]
403 ; SSE-NEXT: movaps 16(%r9), %xmm7
404 ; SSE-NEXT: movaps %xmm10, %xmm13
405 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm7[0],xmm13[1],xmm7[1]
406 ; SSE-NEXT: movaps %xmm13, %xmm1
407 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm0[2,0]
408 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
409 ; SSE-NEXT: movaps 16(%rdx), %xmm6
410 ; SSE-NEXT: movaps 16(%rcx), %xmm8
411 ; SSE-NEXT: movaps %xmm8, %xmm11
412 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm6[0]
413 ; SSE-NEXT: movaps 16(%rdi), %xmm2
414 ; SSE-NEXT: movaps 16(%rsi), %xmm1
415 ; SSE-NEXT: movaps %xmm2, %xmm0
416 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
417 ; SSE-NEXT: movaps %xmm0, %xmm5
418 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm11[2,0]
419 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
420 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm7[2],xmm10[3],xmm7[3]
421 ; SSE-NEXT: movaps %xmm12, %xmm7
422 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1],xmm3[1]
423 ; SSE-NEXT: movaps %xmm10, %xmm3
424 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm7[0,2]
425 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
426 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
427 ; SSE-NEXT: movaps %xmm6, %xmm1
428 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm8[1]
429 ; SSE-NEXT: movaps %xmm2, %xmm7
430 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[0,2]
431 ; SSE-NEXT: movaps %xmm4, %xmm1
432 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,1],xmm14[1,1]
433 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
434 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,3],xmm1[2,0]
435 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
436 ; SSE-NEXT: movaps %xmm3, %xmm1
437 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,1],xmm9[1,1]
438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
439 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm1[2,0]
440 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
441 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[3,3],xmm4[3,3]
442 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,3],xmm14[0,2]
443 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[3,3],xmm3[3,3]
444 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
445 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,3],xmm9[0,2]
446 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
447 ; SSE-NEXT: movaps %xmm9, %xmm1
448 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,1],xmm12[1,1]
449 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm1[2,0]
450 ; SSE-NEXT: movaps %xmm8, %xmm1
451 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,1],xmm6[1,1]
452 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm1[2,0]
453 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[3,3],xmm9[3,3]
454 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm12[0,2]
455 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[3,3],xmm8[3,3]
456 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,3],xmm6[0,2]
457 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
458 ; SSE-NEXT: movaps %xmm2, 224(%rax)
459 ; SSE-NEXT: movaps %xmm10, 240(%rax)
460 ; SSE-NEXT: movaps %xmm0, 160(%rax)
461 ; SSE-NEXT: movaps %xmm13, 176(%rax)
462 ; SSE-NEXT: movaps %xmm14, 96(%rax)
463 ; SSE-NEXT: movaps %xmm15, 112(%rax)
464 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
465 ; SSE-NEXT: movaps %xmm0, 32(%rax)
466 ; SSE-NEXT: movaps %xmm5, 48(%rax)
467 ; SSE-NEXT: movaps %xmm7, 192(%rax)
468 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
469 ; SSE-NEXT: movaps %xmm0, 208(%rax)
470 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
471 ; SSE-NEXT: movaps %xmm0, 128(%rax)
472 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
473 ; SSE-NEXT: movaps %xmm0, 144(%rax)
474 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
475 ; SSE-NEXT: movaps %xmm0, 64(%rax)
476 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
477 ; SSE-NEXT: movaps %xmm0, 80(%rax)
478 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
479 ; SSE-NEXT: movaps %xmm0, (%rax)
480 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
481 ; SSE-NEXT: movaps %xmm0, 16(%rax)
482 ; SSE-NEXT: addq $72, %rsp
485 ; AVX1-ONLY-LABEL: store_i32_stride8_vf8:
486 ; AVX1-ONLY: # %bb.0:
487 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
488 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
489 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm3
490 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm5
491 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm4
492 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %ymm6
493 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm7
494 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm8
495 ; AVX1-ONLY-NEXT: vmovaps (%r10), %ymm9
496 ; AVX1-ONLY-NEXT: vmovaps (%rax), %ymm10
497 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm0 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[4],ymm10[4],ymm9[5],ymm10[5]
498 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm8[1,0],ymm7[1,0],ymm8[5,4],ymm7[5,4]
499 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[2,0],ymm0[2,3],ymm1[6,4],ymm0[6,7]
500 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[4],ymm6[4],ymm4[5],ymm6[5]
501 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
502 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm5[1,0],ymm3[1,0],ymm5[5,4],ymm3[5,4]
503 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
504 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
505 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
506 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
507 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
508 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm9[2],ymm10[2],ymm9[3],ymm10[3],ymm9[6],ymm10[6],ymm9[7],ymm10[7]
509 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm8[3,0],ymm7[3,0],ymm8[7,4],ymm7[7,4]
510 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm2[2,0],ymm1[2,3],ymm2[6,4],ymm1[6,7]
511 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[6],ymm6[6],ymm4[7],ymm6[7]
512 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
513 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm11 = ymm5[3,0],ymm3[3,0],ymm5[7,4],ymm3[7,4]
514 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm11 = ymm11[2,0,2,3,6,4,6,7]
515 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm11
516 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm11[0,1],xmm2[2,3]
517 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm1[4,5,6,7]
518 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
519 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm10[1],ymm9[1],ymm10[3],ymm9[3]
520 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[6],ymm8[6],ymm7[7],ymm8[7]
521 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,0],ymm11[4,5],ymm2[6,4]
522 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
523 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm11
524 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm12 = ymm6[1],ymm4[1],ymm6[3],ymm4[3]
525 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm12 = ymm12[0,1,2,0,4,5,6,4]
526 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
527 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm11 = xmm11[0,1],xmm12[2,3]
528 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm2[4,5,6,7]
529 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
530 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm10[0],ymm9[0],ymm10[2],ymm9[2]
531 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[4],ymm8[4],ymm7[5],ymm8[5]
532 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm7[0,1],ymm9[2,0],ymm7[4,5],ymm9[6,4]
533 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
534 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
535 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm6[0],ymm4[0],ymm6[2],ymm4[2]
536 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,1,2,0,4,5,6,4]
537 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
538 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
539 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm7[4,5,6,7]
540 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm4
541 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm5
542 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
543 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm10
544 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm11
545 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm6
546 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm7
547 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm12 = xmm7[0],xmm6[0],xmm7[1],xmm6[1]
548 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm8
549 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm13
550 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm14
551 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm14[1,1,1,1]
552 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm13[1],xmm15[2,3]
553 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
554 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm15[0,1,2,3,4,5],ymm8[6,7]
555 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm11[1,1,1,1]
556 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm10[1],xmm15[2,3]
557 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0,1],xmm9[2,3]
558 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm15[0,1,2,3],ymm8[4,5,6,7]
559 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm15 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
560 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
561 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
562 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm2[2,3,2,3]
563 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
564 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm15[6,7]
565 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm15 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
566 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
567 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm0[1],xmm15[1]
568 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm15[0,1,2,3],ymm1[4,5,6,7]
569 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1]
570 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm9 = xmm10[0],xmm9[0]
571 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
572 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
573 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm11 = xmm12[0,1,0,1]
574 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm0, %ymm11
575 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm11[6,7]
576 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
577 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm4[2,2,2,2]
578 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
579 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
580 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm6[2,2,2,2]
581 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm7[0,1,2],xmm4[3]
582 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
583 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
584 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
585 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
586 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
587 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
588 ; AVX1-ONLY-NEXT: vmovaps %ymm9, (%rax)
589 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
590 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 32(%rax)
591 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
592 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
593 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
594 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
595 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
596 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
597 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
598 ; AVX1-ONLY-NEXT: vzeroupper
599 ; AVX1-ONLY-NEXT: retq
601 ; AVX2-ONLY-LABEL: store_i32_stride8_vf8:
602 ; AVX2-ONLY: # %bb.0:
603 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
604 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
605 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm3
606 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm4
607 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm1
608 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm2
609 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm5
610 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm6
611 ; AVX2-ONLY-NEXT: vmovaps (%r10), %ymm9
612 ; AVX2-ONLY-NEXT: vmovaps (%rax), %ymm10
613 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[4],ymm10[4],ymm9[5],ymm10[5]
614 ; AVX2-ONLY-NEXT: vbroadcastss 20(%r8), %ymm0
615 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm6[5],ymm0[6,7]
616 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm7[6,7]
617 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm1[0],ymm2[0],ymm1[1],ymm2[1],ymm1[4],ymm2[4],ymm1[5],ymm2[5]
618 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm11
619 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm12 = ymm3[1,1,1,1,5,5,5,5]
620 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm4[1],ymm12[2,3,4],ymm4[5],ymm12[6,7]
621 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
622 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3]
623 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
624 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
625 ; AVX2-ONLY-NEXT: vbroadcastss 28(%r10), %ymm11
626 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[6],ymm10[6],ymm11[7],ymm10[7]
627 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[6],ymm6[6],ymm5[7],ymm6[7]
628 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm11[1],ymm10[1],ymm11[3],ymm10[3]
629 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm1[2],ymm2[2],ymm1[3],ymm2[3],ymm1[6],ymm2[6],ymm1[7],ymm2[7]
630 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm1
631 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm12 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
632 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm12[2,3,2,3,6,7,6,7]
633 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
634 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm13[0,1],xmm1[2,3]
635 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm10[4,5,6,7]
636 ; AVX2-ONLY-NEXT: vbroadcastss 24(%rax), %ymm10
637 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5,6],ymm10[7]
638 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm11[0,1,2,3,4,5],ymm9[6,7]
639 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm10
640 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[2,2,2,2]
641 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm10[0,1],xmm2[2,3]
642 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
643 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[4],ymm6[4],ymm5[5],ymm6[5]
644 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm5[0],ymm7[0],ymm5[2],ymm7[2]
645 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
646 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
647 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm8[2,2,2,2]
648 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
649 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm5[4,5,6,7]
650 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm4
651 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm5
652 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
653 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm9
654 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm10
655 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
656 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
657 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm7[0,1],xmm6[2,3]
658 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm6
659 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm7
660 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm7[0],xmm6[0],xmm7[1],xmm6[1]
661 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm0, %ymm11
662 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm12
663 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm13
664 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm13[1,1,1,1]
665 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm14 = xmm14[0],xmm12[1],xmm14[2,3]
666 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm14
667 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm14[0,1,2,3,4,5],ymm11[6,7]
668 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
669 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm11 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
670 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm0, %ymm11
671 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm14 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
672 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm14[2,3,2,3]
673 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
674 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm15[0,1,2,3,4,5],ymm11[6,7]
675 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm15 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
676 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
677 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm15 = xmm0[1],xmm15[1]
678 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm15[0,1,2,3],ymm11[4,5,6,7]
679 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
680 ; AVX2-ONLY-NEXT: vbroadcastss %xmm4, %xmm10
681 ; AVX2-ONLY-NEXT: vbroadcastss %xmm5, %xmm15
682 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm15[0],xmm10[0],xmm15[1],xmm10[1]
683 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm9[0,1],xmm10[2,3]
684 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
685 ; AVX2-ONLY-NEXT: vbroadcastss %xmm6, %xmm12
686 ; AVX2-ONLY-NEXT: vbroadcastss %xmm7, %xmm13
687 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
688 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
689 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm12
690 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm12[6,7]
691 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
692 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm4[2,2,2,2]
693 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
694 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
695 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm6[2,2,2,2]
696 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm7[0,1,2],xmm4[3]
697 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm5
698 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
699 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
700 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
701 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
702 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
703 ; AVX2-ONLY-NEXT: vmovaps %ymm9, (%rax)
704 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 96(%rax)
705 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 32(%rax)
706 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
707 ; AVX2-ONLY-NEXT: vmovaps %ymm2, 192(%rax)
708 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 224(%rax)
709 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
710 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
711 ; AVX2-ONLY-NEXT: vzeroupper
712 ; AVX2-ONLY-NEXT: retq
714 ; AVX512F-LABEL: store_i32_stride8_vf8:
716 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
717 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
718 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
719 ; AVX512F-NEXT: vmovdqa (%rdi), %ymm0
720 ; AVX512F-NEXT: vmovdqa (%rdx), %ymm1
721 ; AVX512F-NEXT: vmovdqa (%r8), %ymm2
722 ; AVX512F-NEXT: vmovdqa (%r11), %ymm3
723 ; AVX512F-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
724 ; AVX512F-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
725 ; AVX512F-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
726 ; AVX512F-NEXT: vinserti64x4 $1, (%r10), %zmm3, %zmm3
727 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,0,8,16,24,u,u,u,u,1,9,17,25>
728 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
729 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,8,16,24,u,u,u,u,1,9,17,25,u,u,u,u>
730 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
731 ; AVX512F-NEXT: movb $-52, %cl
732 ; AVX512F-NEXT: kmovw %ecx, %k1
733 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
734 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,2,10,18,26,u,u,u,u,3,11,19,27>
735 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
736 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <2,10,18,26,u,u,u,u,3,11,19,27,u,u,u,u>
737 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
738 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
739 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,4,12,20,28,u,u,u,u,5,13,21,29>
740 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
741 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <4,12,20,28,u,u,u,u,5,13,21,29,u,u,u,u>
742 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
743 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
744 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,6,14,22,30,u,u,u,u,7,15,23,31>
745 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
746 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = <6,14,22,30,u,u,u,u,7,15,23,31,u,u,u,u>
747 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
748 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
749 ; AVX512F-NEXT: vmovdqa64 %zmm2, 192(%rax)
750 ; AVX512F-NEXT: vmovdqa64 %zmm7, 128(%rax)
751 ; AVX512F-NEXT: vmovdqa64 %zmm6, 64(%rax)
752 ; AVX512F-NEXT: vmovdqa64 %zmm5, (%rax)
753 ; AVX512F-NEXT: vzeroupper
756 ; AVX512BW-LABEL: store_i32_stride8_vf8:
758 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
759 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
760 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
761 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
762 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
763 ; AVX512BW-NEXT: vmovdqa (%r8), %ymm2
764 ; AVX512BW-NEXT: vmovdqa (%r11), %ymm3
765 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
766 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
767 ; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
768 ; AVX512BW-NEXT: vinserti64x4 $1, (%r10), %zmm3, %zmm3
769 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,0,8,16,24,u,u,u,u,1,9,17,25>
770 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
771 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,8,16,24,u,u,u,u,1,9,17,25,u,u,u,u>
772 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm5
773 ; AVX512BW-NEXT: movb $-52, %cl
774 ; AVX512BW-NEXT: kmovd %ecx, %k1
775 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
776 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,2,10,18,26,u,u,u,u,3,11,19,27>
777 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
778 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <2,10,18,26,u,u,u,u,3,11,19,27,u,u,u,u>
779 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm6
780 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
781 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,4,12,20,28,u,u,u,u,5,13,21,29>
782 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
783 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <4,12,20,28,u,u,u,u,5,13,21,29,u,u,u,u>
784 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm7
785 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
786 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,6,14,22,30,u,u,u,u,7,15,23,31>
787 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm4
788 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <6,14,22,30,u,u,u,u,7,15,23,31,u,u,u,u>
789 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
790 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
791 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rax)
792 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%rax)
793 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%rax)
794 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%rax)
795 ; AVX512BW-NEXT: vzeroupper
796 ; AVX512BW-NEXT: retq
797 %in.vec0 = load <8 x i32>, ptr %in.vecptr0, align 64
798 %in.vec1 = load <8 x i32>, ptr %in.vecptr1, align 64
799 %in.vec2 = load <8 x i32>, ptr %in.vecptr2, align 64
800 %in.vec3 = load <8 x i32>, ptr %in.vecptr3, align 64
801 %in.vec4 = load <8 x i32>, ptr %in.vecptr4, align 64
802 %in.vec5 = load <8 x i32>, ptr %in.vecptr5, align 64
803 %in.vec6 = load <8 x i32>, ptr %in.vecptr6, align 64
804 %in.vec7 = load <8 x i32>, ptr %in.vecptr7, align 64
805 %1 = shufflevector <8 x i32> %in.vec0, <8 x i32> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
806 %2 = shufflevector <8 x i32> %in.vec2, <8 x i32> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
807 %3 = shufflevector <8 x i32> %in.vec4, <8 x i32> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
808 %4 = shufflevector <8 x i32> %in.vec6, <8 x i32> %in.vec7, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
809 %5 = shufflevector <16 x i32> %1, <16 x i32> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
810 %6 = shufflevector <16 x i32> %3, <16 x i32> %4, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
811 %7 = shufflevector <32 x i32> %5, <32 x i32> %6, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
812 %interleaved.vec = shufflevector <64 x i32> %7, <64 x i32> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63>
813 store <64 x i32> %interleaved.vec, ptr %out.vec, align 64
817 define void @store_i32_stride8_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
818 ; SSE-LABEL: store_i32_stride8_vf16:
820 ; SSE-NEXT: subq $216, %rsp
821 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
822 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
823 ; SSE-NEXT: movaps (%rdi), %xmm9
824 ; SSE-NEXT: movaps 16(%rdi), %xmm10
825 ; SSE-NEXT: movaps (%rsi), %xmm4
826 ; SSE-NEXT: movaps 16(%rsi), %xmm1
827 ; SSE-NEXT: movaps (%rdx), %xmm2
828 ; SSE-NEXT: movaps 16(%rdx), %xmm0
829 ; SSE-NEXT: movaps (%rcx), %xmm3
830 ; SSE-NEXT: movaps (%r8), %xmm11
831 ; SSE-NEXT: movaps (%r9), %xmm7
832 ; SSE-NEXT: movaps (%r10), %xmm5
833 ; SSE-NEXT: movaps (%rax), %xmm6
834 ; SSE-NEXT: movaps %xmm3, %xmm8
835 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm2[0]
836 ; SSE-NEXT: movaps %xmm9, %xmm13
837 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm4[0],xmm13[1],xmm4[1]
838 ; SSE-NEXT: movaps %xmm13, %xmm12
839 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
840 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
841 ; SSE-NEXT: movaps %xmm6, %xmm8
842 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm5[0]
843 ; SSE-NEXT: movaps %xmm11, %xmm14
844 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm7[0],xmm14[1],xmm7[1]
845 ; SSE-NEXT: movaps %xmm14, %xmm12
846 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
847 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
848 ; SSE-NEXT: movaps %xmm3, %xmm8
849 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm2[1,1]
850 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm8[2,0]
851 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
852 ; SSE-NEXT: movaps %xmm6, %xmm8
853 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm5[1,1]
854 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,3],xmm8[2,0]
855 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
856 ; SSE-NEXT: movaps %xmm2, %xmm8
857 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm3[1]
858 ; SSE-NEXT: unpckhps {{.*#+}} xmm9 = xmm9[2],xmm4[2],xmm9[3],xmm4[3]
859 ; SSE-NEXT: movaps %xmm9, %xmm4
860 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm8[0,2]
861 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
862 ; SSE-NEXT: movaps %xmm5, %xmm4
863 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm6[1]
864 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm7[2],xmm11[3],xmm7[3]
865 ; SSE-NEXT: movaps %xmm11, %xmm7
866 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm4[0,2]
867 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
868 ; SSE-NEXT: movaps 16(%rcx), %xmm4
869 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
870 ; SSE-NEXT: movaps 16(%r10), %xmm3
871 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm2[0,2]
872 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
873 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,3],xmm6[3,3]
874 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm5[0,2]
875 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
876 ; SSE-NEXT: movaps %xmm4, %xmm2
877 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
878 ; SSE-NEXT: movaps %xmm10, %xmm6
879 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1]
880 ; SSE-NEXT: movaps %xmm6, %xmm5
881 ; SSE-NEXT: movaps %xmm6, %xmm7
882 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm2[2,0]
883 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
884 ; SSE-NEXT: movaps 16(%rax), %xmm2
885 ; SSE-NEXT: movaps %xmm2, %xmm5
886 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm3[0]
887 ; SSE-NEXT: movaps 16(%r8), %xmm11
888 ; SSE-NEXT: movaps 16(%r9), %xmm6
889 ; SSE-NEXT: movaps %xmm11, %xmm9
890 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm6[0],xmm9[1],xmm6[1]
891 ; SSE-NEXT: movaps %xmm9, %xmm8
892 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm5[2,0]
893 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
894 ; SSE-NEXT: movaps %xmm4, %xmm5
895 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
896 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm5[2,0]
897 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
898 ; SSE-NEXT: movaps %xmm2, %xmm5
899 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm3[1,1]
900 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm5[2,0]
901 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
902 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
903 ; SSE-NEXT: movaps %xmm0, %xmm1
904 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm4[1]
905 ; SSE-NEXT: movaps %xmm10, %xmm5
906 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm1[0,2]
907 ; SSE-NEXT: movaps %xmm5, (%rsp) # 16-byte Spill
908 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
909 ; SSE-NEXT: movaps %xmm3, %xmm1
910 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
911 ; SSE-NEXT: movaps %xmm11, %xmm5
912 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm1[0,2]
913 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
914 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm4[3,3]
915 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm0[0,2]
916 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
917 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,3],xmm2[3,3]
918 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm3[0,2]
919 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
920 ; SSE-NEXT: movaps 32(%rdx), %xmm3
921 ; SSE-NEXT: movaps 32(%rcx), %xmm0
922 ; SSE-NEXT: movaps %xmm0, %xmm1
923 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
924 ; SSE-NEXT: movaps 32(%rdi), %xmm14
925 ; SSE-NEXT: movaps 32(%rsi), %xmm4
926 ; SSE-NEXT: movaps %xmm14, %xmm5
927 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
928 ; SSE-NEXT: movaps %xmm5, %xmm2
929 ; SSE-NEXT: movaps %xmm5, %xmm7
930 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
931 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
932 ; SSE-NEXT: movaps 32(%r10), %xmm1
933 ; SSE-NEXT: movaps 32(%rax), %xmm2
934 ; SSE-NEXT: movaps %xmm2, %xmm5
935 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm1[0]
936 ; SSE-NEXT: movaps 32(%r8), %xmm11
937 ; SSE-NEXT: movaps 32(%r9), %xmm6
938 ; SSE-NEXT: movaps %xmm11, %xmm13
939 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm6[0],xmm13[1],xmm6[1]
940 ; SSE-NEXT: movaps %xmm13, %xmm8
941 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm5[2,0]
942 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
943 ; SSE-NEXT: movaps %xmm0, %xmm5
944 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm3[1,1]
945 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm5[2,0]
946 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
947 ; SSE-NEXT: movaps %xmm2, %xmm5
948 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm1[1,1]
949 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm5[2,0]
950 ; SSE-NEXT: unpckhps {{.*#+}} xmm14 = xmm14[2],xmm4[2],xmm14[3],xmm4[3]
951 ; SSE-NEXT: movaps %xmm3, %xmm4
952 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
953 ; SSE-NEXT: movaps %xmm14, %xmm5
954 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
955 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
956 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
957 ; SSE-NEXT: movaps %xmm1, %xmm4
958 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm2[1]
959 ; SSE-NEXT: movaps %xmm11, %xmm5
960 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
961 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
962 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,3],xmm0[3,3]
963 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,3],xmm3[0,2]
964 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm2[3,3]
965 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm1[0,2]
966 ; SSE-NEXT: movaps 48(%rdx), %xmm1
967 ; SSE-NEXT: movaps 48(%rcx), %xmm8
968 ; SSE-NEXT: movaps %xmm8, %xmm0
969 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
970 ; SSE-NEXT: movaps 48(%rdi), %xmm3
971 ; SSE-NEXT: movaps 48(%rsi), %xmm12
972 ; SSE-NEXT: movaps %xmm3, %xmm4
973 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm12[0],xmm4[1],xmm12[1]
974 ; SSE-NEXT: movaps %xmm4, %xmm15
975 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[0,1],xmm0[2,0]
976 ; SSE-NEXT: movaps 48(%r10), %xmm0
977 ; SSE-NEXT: movaps 48(%rax), %xmm7
978 ; SSE-NEXT: movaps %xmm7, %xmm2
979 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
980 ; SSE-NEXT: movaps 48(%r8), %xmm5
981 ; SSE-NEXT: movaps 48(%r9), %xmm9
982 ; SSE-NEXT: movaps %xmm5, %xmm6
983 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1]
984 ; SSE-NEXT: movaps %xmm6, %xmm10
985 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm2[2,0]
986 ; SSE-NEXT: movaps %xmm8, %xmm2
987 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[1,1],xmm1[1,1]
988 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,3],xmm2[2,0]
989 ; SSE-NEXT: movaps %xmm7, %xmm2
990 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[1,1],xmm0[1,1]
991 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,3],xmm2[2,0]
992 ; SSE-NEXT: unpckhps {{.*#+}} xmm3 = xmm3[2],xmm12[2],xmm3[3],xmm12[3]
993 ; SSE-NEXT: movaps %xmm1, %xmm2
994 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm8[1]
995 ; SSE-NEXT: movaps %xmm3, %xmm12
996 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm2[0,2]
997 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm9[2],xmm5[3],xmm9[3]
998 ; SSE-NEXT: movaps %xmm0, %xmm2
999 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
1000 ; SSE-NEXT: movaps %xmm5, %xmm9
1001 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm2[0,2]
1002 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm8[3,3]
1003 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,3],xmm1[0,2]
1004 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm7[3,3]
1005 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,3],xmm0[0,2]
1006 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1007 ; SSE-NEXT: movaps %xmm5, 496(%rax)
1008 ; SSE-NEXT: movaps %xmm3, 480(%rax)
1009 ; SSE-NEXT: movaps %xmm9, 464(%rax)
1010 ; SSE-NEXT: movaps %xmm12, 448(%rax)
1011 ; SSE-NEXT: movaps %xmm6, 432(%rax)
1012 ; SSE-NEXT: movaps %xmm4, 416(%rax)
1013 ; SSE-NEXT: movaps %xmm10, 400(%rax)
1014 ; SSE-NEXT: movaps %xmm15, 384(%rax)
1015 ; SSE-NEXT: movaps %xmm11, 368(%rax)
1016 ; SSE-NEXT: movaps %xmm14, 352(%rax)
1017 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1018 ; SSE-NEXT: movaps %xmm0, 336(%rax)
1019 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1020 ; SSE-NEXT: movaps %xmm0, 320(%rax)
1021 ; SSE-NEXT: movaps %xmm13, 304(%rax)
1022 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1023 ; SSE-NEXT: movaps %xmm0, 288(%rax)
1024 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1025 ; SSE-NEXT: movaps %xmm0, 272(%rax)
1026 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1027 ; SSE-NEXT: movaps %xmm0, 256(%rax)
1028 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1029 ; SSE-NEXT: movaps %xmm0, 240(%rax)
1030 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1031 ; SSE-NEXT: movaps %xmm0, 224(%rax)
1032 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1033 ; SSE-NEXT: movaps %xmm0, 208(%rax)
1034 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1035 ; SSE-NEXT: movaps %xmm0, 192(%rax)
1036 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1037 ; SSE-NEXT: movaps %xmm0, 176(%rax)
1038 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1039 ; SSE-NEXT: movaps %xmm0, 160(%rax)
1040 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1041 ; SSE-NEXT: movaps %xmm0, 144(%rax)
1042 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1043 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1044 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1045 ; SSE-NEXT: movaps %xmm0, 112(%rax)
1046 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1047 ; SSE-NEXT: movaps %xmm0, 96(%rax)
1048 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1049 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1050 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1051 ; SSE-NEXT: movaps %xmm0, 64(%rax)
1052 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1053 ; SSE-NEXT: movaps %xmm0, 48(%rax)
1054 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1055 ; SSE-NEXT: movaps %xmm0, 32(%rax)
1056 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1057 ; SSE-NEXT: movaps %xmm0, 16(%rax)
1058 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1059 ; SSE-NEXT: movaps %xmm0, (%rax)
1060 ; SSE-NEXT: addq $216, %rsp
1063 ; AVX1-ONLY-LABEL: store_i32_stride8_vf16:
1064 ; AVX1-ONLY: # %bb.0:
1065 ; AVX1-ONLY-NEXT: subq $264, %rsp # imm = 0x108
1066 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1067 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1068 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm7
1069 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
1070 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm8
1071 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
1072 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm9
1073 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %ymm3
1074 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %ymm4
1075 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm5
1076 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm6
1077 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %ymm11
1078 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %ymm12
1079 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm12[1],ymm11[1],ymm12[3],ymm11[3]
1080 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[6],ymm6[6],ymm5[7],ymm6[7]
1081 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm10[0,1],ymm2[2,0],ymm10[4,5],ymm2[6,4]
1082 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
1083 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
1084 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm13 = ymm4[1],ymm3[1],ymm4[3],ymm3[3]
1085 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
1086 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
1087 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0,1],xmm13[2,3]
1088 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
1089 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1090 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[6],ymm12[6],ymm11[7],ymm12[7]
1091 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm6[3,0],ymm5[3,0],ymm6[7,4],ymm5[7,4]
1092 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm10[2,0],ymm2[2,3],ymm10[6,4],ymm2[6,7]
1093 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
1094 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
1095 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
1096 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[2,0,2,3,6,4,6,7]
1097 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
1098 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm13[0,1],xmm10[2,3]
1099 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
1100 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1101 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm12[0],ymm11[0],ymm12[2],ymm11[2]
1102 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[4],ymm6[4],ymm5[5],ymm6[5]
1103 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1],ymm2[2,0],ymm10[4,5],ymm2[6,4]
1104 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
1105 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
1106 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
1107 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
1108 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
1109 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm2[0,1],xmm13[2,3]
1110 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %ymm2
1111 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1,2,3],ymm10[4,5,6,7]
1112 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1113 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm10
1114 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm13 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[4],ymm12[4],ymm11[5],ymm12[5]
1115 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm11
1116 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm6[1,0],ymm5[1,0],ymm6[5,4],ymm5[5,4]
1117 ; AVX1-ONLY-NEXT: vmovaps (%r10), %ymm12
1118 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm5[2,0],ymm13[2,3],ymm5[6,4],ymm13[6,7]
1119 ; AVX1-ONLY-NEXT: vmovaps (%rax), %ymm5
1120 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
1121 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1122 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
1123 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0,2,3,6,4,6,7]
1124 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
1125 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm3[2,3]
1126 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
1127 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1128 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm0 = ymm5[1],ymm12[1],ymm5[3],ymm12[3]
1129 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[6],ymm11[6],ymm10[7],ymm11[7]
1130 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,0],ymm1[4,5],ymm0[6,4]
1131 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[6],ymm8[6],ymm7[7],ymm8[7]
1132 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
1133 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm2[1],ymm9[1],ymm2[3],ymm9[3]
1134 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,1,2,0,4,5,6,4]
1135 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1136 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3]
1137 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
1138 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1139 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm12[2],ymm5[2],ymm12[3],ymm5[3],ymm12[6],ymm5[6],ymm12[7],ymm5[7]
1140 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm11[3,0],ymm10[3,0],ymm11[7,4],ymm10[7,4]
1141 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[2,0],ymm0[2,3],ymm1[6,4],ymm0[6,7]
1142 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm9[2],ymm2[2],ymm9[3],ymm2[3],ymm9[6],ymm2[6],ymm9[7],ymm2[7]
1143 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
1144 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm8[3,0],ymm7[3,0],ymm8[7,4],ymm7[7,4]
1145 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,0,2,3,6,4,6,7]
1146 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1147 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
1148 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
1149 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1150 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm0 = ymm5[0],ymm12[0],ymm5[2],ymm12[2]
1151 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[4],ymm11[4],ymm10[5],ymm11[5]
1152 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,0],ymm1[4,5],ymm0[6,4]
1153 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[4],ymm8[4],ymm7[5],ymm8[5]
1154 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
1155 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm2[0],ymm9[0],ymm2[2],ymm9[2]
1156 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,1,2,0,4,5,6,4]
1157 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1158 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3]
1159 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
1160 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1161 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm0 = ymm12[0],ymm5[0],ymm12[1],ymm5[1],ymm12[4],ymm5[4],ymm12[5],ymm5[5]
1162 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm11[1,0],ymm10[1,0],ymm11[5,4],ymm10[5,4]
1163 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[2,0],ymm0[2,3],ymm1[6,4],ymm0[6,7]
1164 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm9[0],ymm2[0],ymm9[1],ymm2[1],ymm9[4],ymm2[4],ymm9[5],ymm2[5]
1165 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
1166 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm8[1,0],ymm7[1,0],ymm8[5,4],ymm7[5,4]
1167 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
1168 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
1169 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
1170 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
1171 ; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
1172 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm4
1173 ; AVX1-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1174 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm1
1175 ; AVX1-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1176 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm0
1177 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1178 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1179 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm1
1180 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm15
1181 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm13
1182 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm13[1,1,1,1]
1183 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm15[1],xmm2[2,3]
1184 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1185 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
1186 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm11
1187 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm11[1,1,1,1]
1188 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm4[1],xmm2[2,3]
1189 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm3
1190 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1191 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm14
1192 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm14[0],xmm3[0],xmm14[1],xmm3[1]
1193 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm3[2,3]
1194 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
1195 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1196 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm11[0],xmm4[0],xmm11[1],xmm4[1]
1197 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm1 = xmm1[0],xmm3[0]
1198 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm13[0],xmm15[0],xmm13[1],xmm15[1]
1199 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1200 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,1,0,1]
1201 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
1202 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
1203 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
1204 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1205 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm9
1206 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %xmm7
1207 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm9[2,2,2,2]
1208 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm7[0,1,2],xmm0[3]
1209 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm6
1210 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm8
1211 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm5
1212 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm8[2],xmm5[3],xmm8[3]
1213 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm10
1214 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm6[6,7]
1215 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm6
1216 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
1217 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm12 = xmm6[2,2,2,2]
1218 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm1[0,1,2],xmm12[3]
1219 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm3
1220 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm2
1221 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1222 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm0[0,1],xmm12[2,3]
1223 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm10[4,5,6,7]
1224 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm10 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1225 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm10[1]
1226 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm10 = xmm7[2],xmm9[2],xmm7[3],xmm9[3]
1227 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
1228 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm4[2,3,2,3]
1229 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
1230 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm10[6,7]
1231 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm4[4,5,6,7]
1232 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1]
1233 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm4 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
1234 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm8[0],xmm5[1],xmm8[1]
1235 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
1236 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm4[0,1,0,1]
1237 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
1238 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm6[6,7]
1239 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1240 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm1[0]
1241 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
1242 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm2[1,1,1,1]
1243 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm3[1],xmm2[2,3]
1244 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
1245 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[1,1,1,1]
1246 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm8[1],xmm2[2,3]
1247 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm3
1248 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1249 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
1250 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1251 ; AVX1-ONLY-NEXT: vunpckhps {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm2 # 16-byte Folded Reload
1252 ; AVX1-ONLY-NEXT: # xmm2 = xmm11[2],mem[2],xmm11[3],mem[3]
1253 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm3 = xmm13[2],xmm15[2],xmm13[3],xmm15[3]
1254 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1255 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm7[2,2,2,2]
1256 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1257 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm6[0,1,2],xmm4[3]
1258 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
1259 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm5
1260 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
1261 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
1262 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm8[2,2,2,2]
1263 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm14[0,1,2],xmm5[3]
1264 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm2[0,1],xmm5[2,3]
1265 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3],ymm4[4,5,6,7]
1266 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm14[2],xmm8[2],xmm14[3],xmm8[3]
1267 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
1268 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1269 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
1270 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[2,3,2,3]
1271 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
1272 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm5[6,7]
1273 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
1274 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1275 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 96(%rax)
1276 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 64(%rax)
1277 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 288(%rax)
1278 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
1279 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 352(%rax)
1280 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 320(%rax)
1281 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1282 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
1283 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1284 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
1285 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1286 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
1287 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1288 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
1289 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1290 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
1291 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1292 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
1293 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1294 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
1295 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1296 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
1297 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1298 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
1299 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1300 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
1301 ; AVX1-ONLY-NEXT: addq $264, %rsp # imm = 0x108
1302 ; AVX1-ONLY-NEXT: vzeroupper
1303 ; AVX1-ONLY-NEXT: retq
1305 ; AVX2-ONLY-LABEL: store_i32_stride8_vf16:
1306 ; AVX2-ONLY: # %bb.0:
1307 ; AVX2-ONLY-NEXT: subq $328, %rsp # imm = 0x148
1308 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1309 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1310 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm7
1311 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm3
1312 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm8
1313 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm4
1314 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm0
1315 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm10
1316 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %ymm11
1317 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm9
1318 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm6
1319 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %ymm12
1320 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %ymm13
1321 ; AVX2-ONLY-NEXT: vbroadcastss 56(%rax), %ymm1
1322 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm12[0,1,2,3,4,5,6],ymm1[7]
1323 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
1324 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm5
1325 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm14 = ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[6],ymm11[6],ymm10[7],ymm11[7]
1326 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm15 = ymm14[2,2,2,2]
1327 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],xmm15[2,3]
1328 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm15 = ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[6],ymm6[6],ymm9[7],ymm6[7]
1329 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm15[0,1,2,3,4,5],ymm1[6,7]
1330 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1,2,3],ymm1[4,5,6,7]
1331 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1332 ; AVX2-ONLY-NEXT: vbroadcastss 60(%r10), %ymm1
1333 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm1[2],ymm13[2],ymm1[3],ymm13[3],ymm1[6],ymm13[6],ymm1[7],ymm13[7]
1334 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm15 = ymm15[1],ymm1[1],ymm15[3],ymm1[3]
1335 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm1
1336 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm14, %xmm5
1337 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,3,2,3,6,7,6,7]
1338 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
1339 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm5[2,3]
1340 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm5
1341 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm15[4,5,6,7]
1342 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1343 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm2
1344 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm12 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[4],ymm13[4],ymm12[5],ymm13[5]
1345 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm13 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
1346 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
1347 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[4],ymm11[4],ymm10[5],ymm11[5]
1348 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm10 = ymm11[2,2,2,2]
1349 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm13[0,1],xmm10[2,3]
1350 ; AVX2-ONLY-NEXT: vmovaps (%r10), %ymm10
1351 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm9 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[4],ymm6[4],ymm9[5],ymm6[5]
1352 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm9 = ymm9[0],ymm12[0],ymm9[2],ymm12[2]
1353 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm13[0,1,2,3],ymm9[4,5,6,7]
1354 ; AVX2-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1355 ; AVX2-ONLY-NEXT: vbroadcastss 52(%r8), %ymm9
1356 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3,4],ymm6[5],ymm9[6,7]
1357 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm12[6,7]
1358 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm9
1359 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[1,1,1,1,5,5,5,5]
1360 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7]
1361 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1362 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm9[2,3]
1363 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
1364 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1365 ; AVX2-ONLY-NEXT: vbroadcastss 24(%rax), %ymm3
1366 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm10[0,1,2,3,4,5,6],ymm3[7]
1367 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[6],ymm8[6],ymm7[7],ymm8[7]
1368 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm6
1369 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm9 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
1370 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm11 = ymm9[2,2,2,2]
1371 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm11[2,3]
1372 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm5[2],ymm2[2],ymm5[3],ymm2[3],ymm5[6],ymm2[6],ymm5[7],ymm2[7]
1373 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm11[0,1,2,3,4,5],ymm3[6,7]
1374 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
1375 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1376 ; AVX2-ONLY-NEXT: vmovaps (%rax), %ymm3
1377 ; AVX2-ONLY-NEXT: vbroadcastss 28(%r10), %ymm6
1378 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm6[2],ymm3[2],ymm6[3],ymm3[3],ymm6[6],ymm3[6],ymm6[7],ymm3[7]
1379 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm11[1],ymm6[1],ymm11[3],ymm6[3]
1380 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm9
1381 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,3,2,3,6,7,6,7]
1382 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
1383 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm9[2,3]
1384 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
1385 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1386 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm10[0],ymm3[0],ymm10[1],ymm3[1],ymm10[4],ymm3[4],ymm10[5],ymm3[5]
1387 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm5[0],ymm2[0],ymm5[1],ymm2[1],ymm5[4],ymm2[4],ymm5[5],ymm2[5]
1388 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm4 = ymm4[0],ymm3[0],ymm4[2],ymm3[2]
1389 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[4],ymm8[4],ymm7[5],ymm8[5]
1390 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
1391 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
1392 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[2,2,2,2]
1393 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm5[0,1],xmm1[2,3]
1394 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
1395 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1396 ; AVX2-ONLY-NEXT: vbroadcastss 20(%r8), %ymm1
1397 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5],ymm1[6,7]
1398 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm3[6,7]
1399 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
1400 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm7[1,1,1,1,5,5,5,5]
1401 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0],ymm8[1],ymm2[2,3,4],ymm8[5],ymm2[6,7]
1402 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
1403 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
1404 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1405 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1406 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm1
1407 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm14
1408 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm14[0],xmm1[0],xmm14[1],xmm1[1]
1409 ; AVX2-ONLY-NEXT: vmovaps %xmm1, %xmm3
1410 ; AVX2-ONLY-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1411 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm2
1412 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm4
1413 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[1,1,1,1]
1414 ; AVX2-ONLY-NEXT: vmovaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1415 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm2[1],xmm1[2,3]
1416 ; AVX2-ONLY-NEXT: vmovaps %xmm2, %xmm5
1417 ; AVX2-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1418 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
1419 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm11
1420 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm12
1421 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1422 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
1423 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm6
1424 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm13
1425 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm13[1,1,1,1]
1426 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm6[1],xmm2[2,3]
1427 ; AVX2-ONLY-NEXT: vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1428 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1429 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
1430 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1431 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1432 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm0
1433 ; AVX2-ONLY-NEXT: vbroadcastss %xmm14, %xmm1
1434 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1435 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
1436 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
1437 ; AVX2-ONLY-NEXT: vbroadcastss %xmm11, %xmm1
1438 ; AVX2-ONLY-NEXT: vbroadcastss %xmm12, %xmm2
1439 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1440 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm13[0],xmm6[0],xmm13[1],xmm6[1]
1441 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1442 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
1443 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
1444 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1445 ; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
1446 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm7
1447 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %xmm6
1448 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm7[2,2,2,2]
1449 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm6[0,1,2],xmm0[3]
1450 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
1451 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm10
1452 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm9
1453 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm15 = xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1454 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm1
1455 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1456 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm3
1457 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
1458 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm3[2,2,2,2]
1459 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm2[3]
1460 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
1461 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm2
1462 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm2[2],xmm4[2],xmm2[3],xmm4[3]
1463 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm0[0,1],xmm5[2,3]
1464 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
1465 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1466 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
1467 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm5[1]
1468 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1469 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
1470 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm15[2,3,2,3]
1471 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
1472 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
1473 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
1474 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1475 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm0
1476 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm5
1477 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
1478 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm5 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
1479 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm5[0,1],xmm0[2,3]
1480 ; AVX2-ONLY-NEXT: vbroadcastss %xmm7, %xmm5
1481 ; AVX2-ONLY-NEXT: vbroadcastss %xmm6, %xmm8
1482 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm5 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
1483 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
1484 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
1485 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
1486 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1,2,3,4,5],ymm5[6,7]
1487 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm5[4,5,6,7]
1488 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
1489 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm2[1,1,1,1]
1490 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm4[1],xmm1[2,3]
1491 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
1492 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
1493 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm9[1,1,1,1]
1494 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm10[1],xmm2[2,3]
1495 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
1496 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1497 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
1498 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
1499 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1500 ; AVX2-ONLY-NEXT: vunpckhps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1501 ; AVX2-ONLY-NEXT: # xmm1 = xmm1[2],mem[2],xmm1[3],mem[3]
1502 ; AVX2-ONLY-NEXT: vunpckhps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm2 # 16-byte Folded Reload
1503 ; AVX2-ONLY-NEXT: # xmm2 = xmm13[2],mem[2],xmm13[3],mem[3]
1504 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm11[2,2,2,2]
1505 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm12[0,1,2],xmm3[3]
1506 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
1507 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm4
1508 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
1509 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1510 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm5[2,2,2,2]
1511 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm14[0,1,2],xmm4[3]
1512 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm1[0,1],xmm4[2,3]
1513 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
1514 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm14[2],xmm5[2],xmm14[3],xmm5[3]
1515 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm1[1],xmm4[1]
1516 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1517 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
1518 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm2[2,3,2,3]
1519 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
1520 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
1521 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
1522 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1523 ; AVX2-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
1524 ; AVX2-ONLY-NEXT: vmovaps %ymm3, 64(%rax)
1525 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
1526 ; AVX2-ONLY-NEXT: vmovaps %ymm15, 256(%rax)
1527 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1528 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
1529 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1530 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
1531 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1532 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
1533 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1534 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
1535 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1536 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
1537 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1538 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
1539 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1540 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
1541 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1542 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
1543 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1544 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
1545 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1546 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
1547 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1548 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
1549 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1550 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
1551 ; AVX2-ONLY-NEXT: addq $328, %rsp # imm = 0x148
1552 ; AVX2-ONLY-NEXT: vzeroupper
1553 ; AVX2-ONLY-NEXT: retq
1555 ; AVX512F-LABEL: store_i32_stride8_vf16:
1557 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
1558 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
1559 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
1560 ; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm0
1561 ; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm1
1562 ; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm2
1563 ; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm3
1564 ; AVX512F-NEXT: vmovdqa64 (%r8), %zmm5
1565 ; AVX512F-NEXT: vmovdqa64 (%r9), %zmm6
1566 ; AVX512F-NEXT: vmovdqa64 (%r11), %zmm7
1567 ; AVX512F-NEXT: vmovdqa64 (%r10), %zmm8
1568 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
1569 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm4
1570 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
1571 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm9
1572 ; AVX512F-NEXT: movb $-120, %cl
1573 ; AVX512F-NEXT: kmovw %ecx, %k1
1574 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
1575 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
1576 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm10
1577 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
1578 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm4
1579 ; AVX512F-NEXT: movb $34, %cl
1580 ; AVX512F-NEXT: kmovw %ecx, %k2
1581 ; AVX512F-NEXT: vmovdqa64 %zmm10, %zmm4 {%k2}
1582 ; AVX512F-NEXT: movb $-52, %cl
1583 ; AVX512F-NEXT: kmovw %ecx, %k3
1584 ; AVX512F-NEXT: vmovdqa64 %zmm9, %zmm4 {%k3}
1585 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
1586 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm9
1587 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
1588 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm10
1589 ; AVX512F-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
1590 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
1591 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm11
1592 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
1593 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm9
1594 ; AVX512F-NEXT: vmovdqa64 %zmm11, %zmm9 {%k2}
1595 ; AVX512F-NEXT: vmovdqa64 %zmm10, %zmm9 {%k3}
1596 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
1597 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm10
1598 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
1599 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm11
1600 ; AVX512F-NEXT: vmovdqa64 %zmm10, %zmm11 {%k1}
1601 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
1602 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1603 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
1604 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm10
1605 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm10 {%k2}
1606 ; AVX512F-NEXT: vmovdqa64 %zmm11, %zmm10 {%k3}
1607 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
1608 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm11
1609 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
1610 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm12
1611 ; AVX512F-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
1612 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
1613 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm13
1614 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
1615 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm11
1616 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11 {%k2}
1617 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm11 {%k3}
1618 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
1619 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1620 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
1621 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1622 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1623 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
1624 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1625 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm14 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
1626 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm14
1627 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm14 {%k2}
1628 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm14 {%k3}
1629 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
1630 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1631 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
1632 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1633 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1634 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
1635 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1636 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm15 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
1637 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm15
1638 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
1639 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm15 {%k3}
1640 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
1641 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1642 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
1643 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1644 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1645 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
1646 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1647 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm16 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
1648 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm16
1649 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm16 {%k2}
1650 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm16 {%k3}
1651 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
1652 ; AVX512F-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1653 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
1654 ; AVX512F-NEXT: vpermi2d %zmm6, %zmm5, %zmm7
1655 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm7 {%k1}
1656 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
1657 ; AVX512F-NEXT: vpermi2d %zmm3, %zmm2, %zmm5
1658 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
1659 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1660 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm2 {%k2}
1661 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm2 {%k3}
1662 ; AVX512F-NEXT: vmovdqa64 %zmm2, 448(%rax)
1663 ; AVX512F-NEXT: vmovdqa64 %zmm16, 384(%rax)
1664 ; AVX512F-NEXT: vmovdqa64 %zmm15, 320(%rax)
1665 ; AVX512F-NEXT: vmovdqa64 %zmm14, 256(%rax)
1666 ; AVX512F-NEXT: vmovdqa64 %zmm11, 192(%rax)
1667 ; AVX512F-NEXT: vmovdqa64 %zmm10, 128(%rax)
1668 ; AVX512F-NEXT: vmovdqa64 %zmm9, 64(%rax)
1669 ; AVX512F-NEXT: vmovdqa64 %zmm4, (%rax)
1670 ; AVX512F-NEXT: vzeroupper
1671 ; AVX512F-NEXT: retq
1673 ; AVX512BW-LABEL: store_i32_stride8_vf16:
1674 ; AVX512BW: # %bb.0:
1675 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1676 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1677 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
1678 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1679 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm1
1680 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
1681 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm3
1682 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm5
1683 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm6
1684 ; AVX512BW-NEXT: vmovdqa64 (%r11), %zmm7
1685 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm8
1686 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
1687 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm4
1688 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
1689 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm9
1690 ; AVX512BW-NEXT: movb $-120, %cl
1691 ; AVX512BW-NEXT: kmovd %ecx, %k1
1692 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm9 {%k1}
1693 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
1694 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm10
1695 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
1696 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm4
1697 ; AVX512BW-NEXT: movb $34, %cl
1698 ; AVX512BW-NEXT: kmovd %ecx, %k2
1699 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm4 {%k2}
1700 ; AVX512BW-NEXT: movb $-52, %cl
1701 ; AVX512BW-NEXT: kmovd %ecx, %k3
1702 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm4 {%k3}
1703 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
1704 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm9
1705 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
1706 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm10
1707 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm10 {%k1}
1708 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
1709 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm11
1710 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
1711 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm9
1712 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm9 {%k2}
1713 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k3}
1714 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
1715 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm10
1716 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
1717 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm11
1718 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm11 {%k1}
1719 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
1720 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1721 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
1722 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm10
1723 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm10 {%k2}
1724 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k3}
1725 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
1726 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm11
1727 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
1728 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm12
1729 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm12 {%k1}
1730 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
1731 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm13
1732 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
1733 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm11
1734 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11 {%k2}
1735 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm11 {%k3}
1736 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
1737 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1738 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
1739 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1740 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1741 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
1742 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1743 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
1744 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm14
1745 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm14 {%k2}
1746 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm14 {%k3}
1747 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
1748 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1749 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
1750 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1751 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1752 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
1753 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1754 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
1755 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm15
1756 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm15 {%k2}
1757 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm15 {%k3}
1758 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
1759 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1760 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
1761 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm13
1762 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm13 {%k1}
1763 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
1764 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm12
1765 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm16 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
1766 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm16
1767 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm16 {%k2}
1768 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm16 {%k3}
1769 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
1770 ; AVX512BW-NEXT: vpermi2d %zmm8, %zmm7, %zmm12
1771 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
1772 ; AVX512BW-NEXT: vpermi2d %zmm6, %zmm5, %zmm7
1773 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm7 {%k1}
1774 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
1775 ; AVX512BW-NEXT: vpermi2d %zmm3, %zmm2, %zmm5
1776 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
1777 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm0, %zmm2
1778 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm2 {%k2}
1779 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k3}
1780 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 448(%rax)
1781 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 384(%rax)
1782 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 320(%rax)
1783 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 256(%rax)
1784 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 192(%rax)
1785 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 128(%rax)
1786 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 64(%rax)
1787 ; AVX512BW-NEXT: vmovdqa64 %zmm4, (%rax)
1788 ; AVX512BW-NEXT: vzeroupper
1789 ; AVX512BW-NEXT: retq
1790 %in.vec0 = load <16 x i32>, ptr %in.vecptr0, align 64
1791 %in.vec1 = load <16 x i32>, ptr %in.vecptr1, align 64
1792 %in.vec2 = load <16 x i32>, ptr %in.vecptr2, align 64
1793 %in.vec3 = load <16 x i32>, ptr %in.vecptr3, align 64
1794 %in.vec4 = load <16 x i32>, ptr %in.vecptr4, align 64
1795 %in.vec5 = load <16 x i32>, ptr %in.vecptr5, align 64
1796 %in.vec6 = load <16 x i32>, ptr %in.vecptr6, align 64
1797 %in.vec7 = load <16 x i32>, ptr %in.vecptr7, align 64
1798 %1 = shufflevector <16 x i32> %in.vec0, <16 x i32> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1799 %2 = shufflevector <16 x i32> %in.vec2, <16 x i32> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1800 %3 = shufflevector <16 x i32> %in.vec4, <16 x i32> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1801 %4 = shufflevector <16 x i32> %in.vec6, <16 x i32> %in.vec7, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1802 %5 = shufflevector <32 x i32> %1, <32 x i32> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1803 %6 = shufflevector <32 x i32> %3, <32 x i32> %4, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1804 %7 = shufflevector <64 x i32> %5, <64 x i32> %6, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
1805 %interleaved.vec = shufflevector <128 x i32> %7, <128 x i32> poison, <128 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 112, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 113, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 114, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 115, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 116, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 117, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 118, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 119, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 120, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 121, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 122, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 123, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 124, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 125, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 126, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111, i32 127>
1806 store <128 x i32> %interleaved.vec, ptr %out.vec, align 64
1810 define void @store_i32_stride8_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
1811 ; SSE-LABEL: store_i32_stride8_vf32:
1813 ; SSE-NEXT: subq $728, %rsp # imm = 0x2D8
1814 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1815 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1816 ; SSE-NEXT: movaps (%rdi), %xmm9
1817 ; SSE-NEXT: movaps 16(%rdi), %xmm10
1818 ; SSE-NEXT: movaps (%rsi), %xmm3
1819 ; SSE-NEXT: movaps 16(%rsi), %xmm1
1820 ; SSE-NEXT: movaps (%rdx), %xmm2
1821 ; SSE-NEXT: movaps 16(%rdx), %xmm0
1822 ; SSE-NEXT: movaps (%rcx), %xmm4
1823 ; SSE-NEXT: movaps (%r8), %xmm11
1824 ; SSE-NEXT: movaps (%r9), %xmm6
1825 ; SSE-NEXT: movaps (%r10), %xmm5
1826 ; SSE-NEXT: movaps (%rax), %xmm7
1827 ; SSE-NEXT: movaps %xmm4, %xmm8
1828 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm2[0]
1829 ; SSE-NEXT: movaps %xmm9, %xmm13
1830 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm3[0],xmm13[1],xmm3[1]
1831 ; SSE-NEXT: movaps %xmm13, %xmm12
1832 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
1833 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1834 ; SSE-NEXT: movaps %xmm7, %xmm8
1835 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm5[0]
1836 ; SSE-NEXT: movaps %xmm11, %xmm14
1837 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1]
1838 ; SSE-NEXT: movaps %xmm14, %xmm12
1839 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
1840 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1841 ; SSE-NEXT: movaps %xmm4, %xmm8
1842 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm2[1,1]
1843 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm8[2,0]
1844 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1845 ; SSE-NEXT: movaps %xmm7, %xmm8
1846 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm5[1,1]
1847 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,3],xmm8[2,0]
1848 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1849 ; SSE-NEXT: movaps %xmm2, %xmm8
1850 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm4[1]
1851 ; SSE-NEXT: unpckhps {{.*#+}} xmm9 = xmm9[2],xmm3[2],xmm9[3],xmm3[3]
1852 ; SSE-NEXT: movaps %xmm9, %xmm3
1853 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm8[0,2]
1854 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1855 ; SSE-NEXT: movaps %xmm5, %xmm3
1856 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm7[1]
1857 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
1858 ; SSE-NEXT: movaps %xmm11, %xmm6
1859 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,1],xmm3[0,2]
1860 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1861 ; SSE-NEXT: movaps 16(%rcx), %xmm6
1862 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm4[3,3]
1863 ; SSE-NEXT: movaps 16(%r10), %xmm3
1864 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm2[0,2]
1865 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1866 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,3],xmm7[3,3]
1867 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm5[0,2]
1868 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1869 ; SSE-NEXT: movaps %xmm6, %xmm2
1870 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
1871 ; SSE-NEXT: movaps %xmm10, %xmm5
1872 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
1873 ; SSE-NEXT: movaps %xmm5, %xmm4
1874 ; SSE-NEXT: movaps %xmm5, %xmm7
1875 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,0]
1876 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1877 ; SSE-NEXT: movaps 16(%rax), %xmm2
1878 ; SSE-NEXT: movaps %xmm2, %xmm4
1879 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm3[0]
1880 ; SSE-NEXT: movaps 16(%r8), %xmm11
1881 ; SSE-NEXT: movaps 16(%r9), %xmm5
1882 ; SSE-NEXT: movaps %xmm11, %xmm9
1883 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1]
1884 ; SSE-NEXT: movaps %xmm9, %xmm8
1885 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm4[2,0]
1886 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1887 ; SSE-NEXT: movaps %xmm6, %xmm4
1888 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm0[1,1]
1889 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm4[2,0]
1890 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1891 ; SSE-NEXT: movaps %xmm2, %xmm4
1892 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm3[1,1]
1893 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm4[2,0]
1894 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1895 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
1896 ; SSE-NEXT: movaps %xmm0, %xmm1
1897 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm6[1]
1898 ; SSE-NEXT: movaps %xmm10, %xmm4
1899 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[0,2]
1900 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1901 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm5[2],xmm11[3],xmm5[3]
1902 ; SSE-NEXT: movaps %xmm3, %xmm1
1903 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
1904 ; SSE-NEXT: movaps %xmm11, %xmm4
1905 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[0,2]
1906 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1907 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm6[3,3]
1908 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm0[0,2]
1909 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1910 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,3],xmm2[3,3]
1911 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm3[0,2]
1912 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1913 ; SSE-NEXT: movaps 32(%rdx), %xmm0
1914 ; SSE-NEXT: movaps 32(%rcx), %xmm1
1915 ; SSE-NEXT: movaps %xmm1, %xmm2
1916 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
1917 ; SSE-NEXT: movaps 32(%rdi), %xmm7
1918 ; SSE-NEXT: movaps 32(%rsi), %xmm4
1919 ; SSE-NEXT: movaps %xmm7, %xmm5
1920 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
1921 ; SSE-NEXT: movaps %xmm5, %xmm3
1922 ; SSE-NEXT: movaps %xmm5, %xmm8
1923 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
1924 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1925 ; SSE-NEXT: movaps 32(%r10), %xmm2
1926 ; SSE-NEXT: movaps 32(%rax), %xmm3
1927 ; SSE-NEXT: movaps %xmm3, %xmm5
1928 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
1929 ; SSE-NEXT: movaps 32(%r8), %xmm11
1930 ; SSE-NEXT: movaps 32(%r9), %xmm6
1931 ; SSE-NEXT: movaps %xmm11, %xmm10
1932 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
1933 ; SSE-NEXT: movaps %xmm10, %xmm9
1934 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
1935 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1936 ; SSE-NEXT: movaps %xmm1, %xmm5
1937 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
1938 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
1939 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1940 ; SSE-NEXT: movaps %xmm3, %xmm5
1941 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
1942 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
1943 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1944 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
1945 ; SSE-NEXT: movaps %xmm0, %xmm4
1946 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
1947 ; SSE-NEXT: movaps %xmm7, %xmm5
1948 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
1949 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1950 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
1951 ; SSE-NEXT: movaps %xmm2, %xmm4
1952 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
1953 ; SSE-NEXT: movaps %xmm11, %xmm5
1954 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
1955 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1956 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
1957 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
1958 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1959 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
1960 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
1961 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1962 ; SSE-NEXT: movaps 48(%rdx), %xmm0
1963 ; SSE-NEXT: movaps 48(%rcx), %xmm1
1964 ; SSE-NEXT: movaps %xmm1, %xmm2
1965 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
1966 ; SSE-NEXT: movaps 48(%rdi), %xmm7
1967 ; SSE-NEXT: movaps 48(%rsi), %xmm4
1968 ; SSE-NEXT: movaps %xmm7, %xmm5
1969 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
1970 ; SSE-NEXT: movaps %xmm5, %xmm3
1971 ; SSE-NEXT: movaps %xmm5, %xmm8
1972 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
1973 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1974 ; SSE-NEXT: movaps 48(%r10), %xmm2
1975 ; SSE-NEXT: movaps 48(%rax), %xmm3
1976 ; SSE-NEXT: movaps %xmm3, %xmm5
1977 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
1978 ; SSE-NEXT: movaps 48(%r8), %xmm11
1979 ; SSE-NEXT: movaps 48(%r9), %xmm6
1980 ; SSE-NEXT: movaps %xmm11, %xmm10
1981 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
1982 ; SSE-NEXT: movaps %xmm10, %xmm9
1983 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
1984 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1985 ; SSE-NEXT: movaps %xmm1, %xmm5
1986 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
1987 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
1988 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1989 ; SSE-NEXT: movaps %xmm3, %xmm5
1990 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
1991 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
1992 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1993 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
1994 ; SSE-NEXT: movaps %xmm0, %xmm4
1995 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
1996 ; SSE-NEXT: movaps %xmm7, %xmm5
1997 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
1998 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1999 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
2000 ; SSE-NEXT: movaps %xmm2, %xmm4
2001 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
2002 ; SSE-NEXT: movaps %xmm11, %xmm5
2003 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2004 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2005 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
2006 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
2007 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2008 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
2009 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
2010 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2011 ; SSE-NEXT: movaps 64(%rdx), %xmm0
2012 ; SSE-NEXT: movaps 64(%rcx), %xmm1
2013 ; SSE-NEXT: movaps %xmm1, %xmm2
2014 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2015 ; SSE-NEXT: movaps 64(%rdi), %xmm7
2016 ; SSE-NEXT: movaps 64(%rsi), %xmm4
2017 ; SSE-NEXT: movaps %xmm7, %xmm5
2018 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2019 ; SSE-NEXT: movaps %xmm5, %xmm3
2020 ; SSE-NEXT: movaps %xmm5, %xmm8
2021 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
2022 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2023 ; SSE-NEXT: movaps 64(%r10), %xmm2
2024 ; SSE-NEXT: movaps 64(%rax), %xmm3
2025 ; SSE-NEXT: movaps %xmm3, %xmm5
2026 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
2027 ; SSE-NEXT: movaps 64(%r8), %xmm11
2028 ; SSE-NEXT: movaps 64(%r9), %xmm6
2029 ; SSE-NEXT: movaps %xmm11, %xmm10
2030 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
2031 ; SSE-NEXT: movaps %xmm10, %xmm9
2032 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
2033 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2034 ; SSE-NEXT: movaps %xmm1, %xmm5
2035 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
2036 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
2037 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2038 ; SSE-NEXT: movaps %xmm3, %xmm5
2039 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
2040 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
2041 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2042 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
2043 ; SSE-NEXT: movaps %xmm0, %xmm4
2044 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
2045 ; SSE-NEXT: movaps %xmm7, %xmm5
2046 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2047 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2048 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
2049 ; SSE-NEXT: movaps %xmm2, %xmm4
2050 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
2051 ; SSE-NEXT: movaps %xmm11, %xmm5
2052 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2053 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2054 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
2055 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
2056 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2057 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
2058 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
2059 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2060 ; SSE-NEXT: movaps 80(%rdx), %xmm0
2061 ; SSE-NEXT: movaps 80(%rcx), %xmm1
2062 ; SSE-NEXT: movaps %xmm1, %xmm2
2063 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2064 ; SSE-NEXT: movaps 80(%rdi), %xmm7
2065 ; SSE-NEXT: movaps 80(%rsi), %xmm4
2066 ; SSE-NEXT: movaps %xmm7, %xmm5
2067 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2068 ; SSE-NEXT: movaps %xmm5, %xmm3
2069 ; SSE-NEXT: movaps %xmm5, %xmm8
2070 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
2071 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2072 ; SSE-NEXT: movaps 80(%r10), %xmm2
2073 ; SSE-NEXT: movaps 80(%rax), %xmm3
2074 ; SSE-NEXT: movaps %xmm3, %xmm5
2075 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
2076 ; SSE-NEXT: movaps 80(%r8), %xmm11
2077 ; SSE-NEXT: movaps 80(%r9), %xmm6
2078 ; SSE-NEXT: movaps %xmm11, %xmm10
2079 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
2080 ; SSE-NEXT: movaps %xmm10, %xmm9
2081 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
2082 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2083 ; SSE-NEXT: movaps %xmm1, %xmm5
2084 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
2085 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
2086 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2087 ; SSE-NEXT: movaps %xmm3, %xmm5
2088 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
2089 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
2090 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2091 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
2092 ; SSE-NEXT: movaps %xmm0, %xmm4
2093 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
2094 ; SSE-NEXT: movaps %xmm7, %xmm5
2095 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2096 ; SSE-NEXT: movaps %xmm5, (%rsp) # 16-byte Spill
2097 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
2098 ; SSE-NEXT: movaps %xmm2, %xmm4
2099 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
2100 ; SSE-NEXT: movaps %xmm11, %xmm5
2101 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2102 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2103 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
2104 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
2105 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2106 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
2107 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
2108 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2109 ; SSE-NEXT: movaps 96(%rdx), %xmm2
2110 ; SSE-NEXT: movaps 96(%rcx), %xmm0
2111 ; SSE-NEXT: movaps %xmm0, %xmm1
2112 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
2113 ; SSE-NEXT: movaps 96(%rdi), %xmm15
2114 ; SSE-NEXT: movaps 96(%rsi), %xmm4
2115 ; SSE-NEXT: movaps %xmm15, %xmm5
2116 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2117 ; SSE-NEXT: movaps %xmm5, %xmm3
2118 ; SSE-NEXT: movaps %xmm5, %xmm7
2119 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm1[2,0]
2120 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2121 ; SSE-NEXT: movaps 96(%r10), %xmm1
2122 ; SSE-NEXT: movaps 96(%rax), %xmm3
2123 ; SSE-NEXT: movaps %xmm3, %xmm5
2124 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm1[0]
2125 ; SSE-NEXT: movaps 96(%r8), %xmm13
2126 ; SSE-NEXT: movaps 96(%r9), %xmm6
2127 ; SSE-NEXT: movaps %xmm13, %xmm12
2128 ; SSE-NEXT: unpcklps {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
2129 ; SSE-NEXT: movaps %xmm12, %xmm8
2130 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm5[2,0]
2131 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2132 ; SSE-NEXT: movaps %xmm0, %xmm5
2133 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
2134 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm5[2,0]
2135 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2136 ; SSE-NEXT: movaps %xmm3, %xmm5
2137 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm1[1,1]
2138 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,3],xmm5[2,0]
2139 ; SSE-NEXT: unpckhps {{.*#+}} xmm15 = xmm15[2],xmm4[2],xmm15[3],xmm4[3]
2140 ; SSE-NEXT: movaps %xmm2, %xmm4
2141 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
2142 ; SSE-NEXT: movaps %xmm15, %xmm5
2143 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2144 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2145 ; SSE-NEXT: unpckhps {{.*#+}} xmm13 = xmm13[2],xmm6[2],xmm13[3],xmm6[3]
2146 ; SSE-NEXT: movaps %xmm1, %xmm4
2147 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
2148 ; SSE-NEXT: movaps %xmm13, %xmm5
2149 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
2150 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2151 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm0[3,3]
2152 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,3],xmm2[0,2]
2153 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm3[3,3]
2154 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm1[0,2]
2155 ; SSE-NEXT: movaps 112(%rdx), %xmm1
2156 ; SSE-NEXT: movaps 112(%rcx), %xmm8
2157 ; SSE-NEXT: movaps %xmm8, %xmm0
2158 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
2159 ; SSE-NEXT: movaps 112(%rdi), %xmm2
2160 ; SSE-NEXT: movaps 112(%rsi), %xmm11
2161 ; SSE-NEXT: movaps %xmm2, %xmm3
2162 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0],xmm11[0],xmm3[1],xmm11[1]
2163 ; SSE-NEXT: movaps %xmm3, %xmm14
2164 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm0[2,0]
2165 ; SSE-NEXT: movaps 112(%r10), %xmm0
2166 ; SSE-NEXT: movaps 112(%rax), %xmm7
2167 ; SSE-NEXT: movaps %xmm7, %xmm5
2168 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm0[0]
2169 ; SSE-NEXT: movaps 112(%r8), %xmm4
2170 ; SSE-NEXT: movaps 112(%r9), %xmm9
2171 ; SSE-NEXT: movaps %xmm4, %xmm6
2172 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1]
2173 ; SSE-NEXT: movaps %xmm6, %xmm10
2174 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm5[2,0]
2175 ; SSE-NEXT: movaps %xmm8, %xmm5
2176 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm1[1,1]
2177 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,3],xmm5[2,0]
2178 ; SSE-NEXT: movaps %xmm7, %xmm5
2179 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
2180 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,3],xmm5[2,0]
2181 ; SSE-NEXT: unpckhps {{.*#+}} xmm2 = xmm2[2],xmm11[2],xmm2[3],xmm11[3]
2182 ; SSE-NEXT: movaps %xmm1, %xmm5
2183 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm8[1]
2184 ; SSE-NEXT: movaps %xmm2, %xmm11
2185 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm5[0,2]
2186 ; SSE-NEXT: unpckhps {{.*#+}} xmm4 = xmm4[2],xmm9[2],xmm4[3],xmm9[3]
2187 ; SSE-NEXT: movaps %xmm0, %xmm5
2188 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm7[1]
2189 ; SSE-NEXT: movaps %xmm4, %xmm9
2190 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[0,2]
2191 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm8[3,3]
2192 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,3],xmm1[0,2]
2193 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm7[3,3]
2194 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,3],xmm0[0,2]
2195 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2196 ; SSE-NEXT: movaps %xmm4, 1008(%rax)
2197 ; SSE-NEXT: movaps %xmm2, 992(%rax)
2198 ; SSE-NEXT: movaps %xmm9, 976(%rax)
2199 ; SSE-NEXT: movaps %xmm11, 960(%rax)
2200 ; SSE-NEXT: movaps %xmm6, 944(%rax)
2201 ; SSE-NEXT: movaps %xmm3, 928(%rax)
2202 ; SSE-NEXT: movaps %xmm10, 912(%rax)
2203 ; SSE-NEXT: movaps %xmm14, 896(%rax)
2204 ; SSE-NEXT: movaps %xmm13, 880(%rax)
2205 ; SSE-NEXT: movaps %xmm15, 864(%rax)
2206 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2207 ; SSE-NEXT: movaps %xmm0, 848(%rax)
2208 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2209 ; SSE-NEXT: movaps %xmm0, 832(%rax)
2210 ; SSE-NEXT: movaps %xmm12, 816(%rax)
2211 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2212 ; SSE-NEXT: movaps %xmm0, 800(%rax)
2213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2214 ; SSE-NEXT: movaps %xmm0, 784(%rax)
2215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2216 ; SSE-NEXT: movaps %xmm0, 768(%rax)
2217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2218 ; SSE-NEXT: movaps %xmm0, 752(%rax)
2219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2220 ; SSE-NEXT: movaps %xmm0, 736(%rax)
2221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2222 ; SSE-NEXT: movaps %xmm0, 720(%rax)
2223 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2224 ; SSE-NEXT: movaps %xmm0, 704(%rax)
2225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2226 ; SSE-NEXT: movaps %xmm0, 688(%rax)
2227 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2228 ; SSE-NEXT: movaps %xmm0, 672(%rax)
2229 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2230 ; SSE-NEXT: movaps %xmm0, 656(%rax)
2231 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2232 ; SSE-NEXT: movaps %xmm0, 640(%rax)
2233 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2234 ; SSE-NEXT: movaps %xmm0, 624(%rax)
2235 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2236 ; SSE-NEXT: movaps %xmm0, 608(%rax)
2237 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2238 ; SSE-NEXT: movaps %xmm0, 592(%rax)
2239 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2240 ; SSE-NEXT: movaps %xmm0, 576(%rax)
2241 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2242 ; SSE-NEXT: movaps %xmm0, 560(%rax)
2243 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2244 ; SSE-NEXT: movaps %xmm0, 544(%rax)
2245 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2246 ; SSE-NEXT: movaps %xmm0, 528(%rax)
2247 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2248 ; SSE-NEXT: movaps %xmm0, 512(%rax)
2249 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2250 ; SSE-NEXT: movaps %xmm0, 496(%rax)
2251 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2252 ; SSE-NEXT: movaps %xmm0, 480(%rax)
2253 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2254 ; SSE-NEXT: movaps %xmm0, 464(%rax)
2255 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2256 ; SSE-NEXT: movaps %xmm0, 448(%rax)
2257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2258 ; SSE-NEXT: movaps %xmm0, 432(%rax)
2259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2260 ; SSE-NEXT: movaps %xmm0, 416(%rax)
2261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2262 ; SSE-NEXT: movaps %xmm0, 400(%rax)
2263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2264 ; SSE-NEXT: movaps %xmm0, 384(%rax)
2265 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2266 ; SSE-NEXT: movaps %xmm0, 368(%rax)
2267 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2268 ; SSE-NEXT: movaps %xmm0, 352(%rax)
2269 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2270 ; SSE-NEXT: movaps %xmm0, 336(%rax)
2271 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2272 ; SSE-NEXT: movaps %xmm0, 320(%rax)
2273 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2274 ; SSE-NEXT: movaps %xmm0, 304(%rax)
2275 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2276 ; SSE-NEXT: movaps %xmm0, 288(%rax)
2277 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2278 ; SSE-NEXT: movaps %xmm0, 272(%rax)
2279 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2280 ; SSE-NEXT: movaps %xmm0, 256(%rax)
2281 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2282 ; SSE-NEXT: movaps %xmm0, 240(%rax)
2283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2284 ; SSE-NEXT: movaps %xmm0, 224(%rax)
2285 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2286 ; SSE-NEXT: movaps %xmm0, 208(%rax)
2287 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2288 ; SSE-NEXT: movaps %xmm0, 192(%rax)
2289 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2290 ; SSE-NEXT: movaps %xmm0, 176(%rax)
2291 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2292 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2293 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2294 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2295 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2296 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2297 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2298 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2299 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2300 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2301 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2302 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2303 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2304 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2305 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2306 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2307 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2308 ; SSE-NEXT: movaps %xmm0, 32(%rax)
2309 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2310 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2311 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2312 ; SSE-NEXT: movaps %xmm0, (%rax)
2313 ; SSE-NEXT: addq $728, %rsp # imm = 0x2D8
2316 ; AVX1-ONLY-LABEL: store_i32_stride8_vf32:
2317 ; AVX1-ONLY: # %bb.0:
2318 ; AVX1-ONLY-NEXT: subq $648, %rsp # imm = 0x288
2319 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2320 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
2321 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm3
2322 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
2323 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm4
2324 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
2325 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
2326 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm5
2327 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %ymm7
2328 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm8
2329 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm11
2330 ; AVX1-ONLY-NEXT: vmovaps (%r10), %ymm10
2331 ; AVX1-ONLY-NEXT: vmovaps (%rax), %ymm12
2332 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm6 = ymm12[0],ymm10[0],ymm12[2],ymm10[2]
2333 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm9 = ymm8[0],ymm11[0],ymm8[1],ymm11[1],ymm8[4],ymm11[4],ymm8[5],ymm11[5]
2334 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm9[0,1],ymm6[2,0],ymm9[4,5],ymm6[6,4]
2335 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm9 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
2336 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm9
2337 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm7[0],ymm5[0],ymm7[2],ymm5[2]
2338 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
2339 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
2340 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm9[0,1],xmm13[2,3]
2341 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
2342 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2343 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm10[0],ymm12[0],ymm10[1],ymm12[1],ymm10[4],ymm12[4],ymm10[5],ymm12[5]
2344 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm9 = ymm11[1,0],ymm8[1,0],ymm11[5,4],ymm8[5,4]
2345 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm9[2,0],ymm6[2,3],ymm9[6,4],ymm6[6,7]
2346 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm9 = ymm5[0],ymm7[0],ymm5[1],ymm7[1],ymm5[4],ymm7[4],ymm5[5],ymm7[5]
2347 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm9
2348 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
2349 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[2,0,2,3,6,4,6,7]
2350 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
2351 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm13[0,1],xmm9[2,3]
2352 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0,1,2,3],ymm6[4,5,6,7]
2353 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2354 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm12[1],ymm10[1],ymm12[3],ymm10[3]
2355 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm9 = ymm8[2],ymm11[2],ymm8[3],ymm11[3],ymm8[6],ymm11[6],ymm8[7],ymm11[7]
2356 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm9 = ymm9[0,1],ymm6[2,0],ymm9[4,5],ymm6[6,4]
2357 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
2358 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
2359 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm13 = ymm7[1],ymm5[1],ymm7[3],ymm5[3]
2360 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
2361 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
2362 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm6[0,1],xmm13[2,3]
2363 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %ymm6
2364 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm13[0,1,2,3],ymm9[4,5,6,7]
2365 ; AVX1-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2366 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm9
2367 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm12 = ymm10[2],ymm12[2],ymm10[3],ymm12[3],ymm10[6],ymm12[6],ymm10[7],ymm12[7]
2368 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm10
2369 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm11 = ymm11[3,0],ymm8[3,0],ymm11[7,4],ymm8[7,4]
2370 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %ymm8
2371 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm12 = ymm11[2,0],ymm12[2,3],ymm11[6,4],ymm12[6,7]
2372 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %ymm11
2373 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm5[2],ymm7[2],ymm5[3],ymm7[3],ymm5[6],ymm7[6],ymm5[7],ymm7[7]
2374 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2375 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
2376 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,0,2,3,6,4,6,7]
2377 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
2378 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm5[2,3]
2379 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
2380 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2381 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm11[0],ymm8[0],ymm11[2],ymm8[2]
2382 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[4],ymm10[4],ymm9[5],ymm10[5]
2383 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
2384 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
2385 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
2386 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm6[0],ymm2[0],ymm6[2],ymm2[2]
2387 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,1,2,0,4,5,6,4]
2388 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2389 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3]
2390 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
2391 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2392 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm8[0],ymm11[0],ymm8[1],ymm11[1],ymm8[4],ymm11[4],ymm8[5],ymm11[5]
2393 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm10[1,0],ymm9[1,0],ymm10[5,4],ymm9[5,4]
2394 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[2,0],ymm3[2,3],ymm4[6,4],ymm3[6,7]
2395 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm2[0],ymm6[0],ymm2[1],ymm6[1],ymm2[4],ymm6[4],ymm2[5],ymm6[5]
2396 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
2397 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
2398 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[2,0,2,3,6,4,6,7]
2399 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2400 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
2401 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
2402 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2403 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm11[1],ymm8[1],ymm11[3],ymm8[3]
2404 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm9[2],ymm10[2],ymm9[3],ymm10[3],ymm9[6],ymm10[6],ymm9[7],ymm10[7]
2405 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
2406 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
2407 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
2408 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm6[1],ymm2[1],ymm6[3],ymm2[3]
2409 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,1,2,0,4,5,6,4]
2410 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2411 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3]
2412 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
2413 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2414 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %ymm3
2415 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm7 = ymm8[2],ymm11[2],ymm8[3],ymm11[3],ymm8[6],ymm11[6],ymm8[7],ymm11[7]
2416 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %ymm4
2417 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm10[3,0],ymm9[3,0],ymm10[7,4],ymm9[7,4]
2418 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %ymm5
2419 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
2420 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %ymm7
2421 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm2[2],ymm6[2],ymm2[3],ymm6[3],ymm2[6],ymm6[6],ymm2[7],ymm6[7]
2422 ; AVX1-ONLY-NEXT: vmovaps 64(%r10), %ymm2
2423 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
2424 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %ymm0
2425 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
2426 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[2,0,2,3,6,4,6,7]
2427 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
2428 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0,1],xmm6[2,3]
2429 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
2430 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2431 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
2432 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm5[0],ymm7[0],ymm5[1],ymm7[1],ymm5[4],ymm7[4],ymm5[5],ymm7[5]
2433 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm6[0,1],ymm1[2,0],ymm6[4,5],ymm1[6,4]
2434 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
2435 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm8
2436 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %ymm1
2437 ; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %ymm9
2438 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm1[0],ymm9[2],ymm1[2]
2439 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
2440 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2441 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
2442 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
2443 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2444 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[4],ymm0[4],ymm2[5],ymm0[5]
2445 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm7[1,0],ymm5[1,0],ymm7[5,4],ymm5[5,4]
2446 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm8[2,0],ymm6[2,3],ymm8[6,4],ymm6[6,7]
2447 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm1[0],ymm9[0],ymm1[1],ymm9[1],ymm1[4],ymm9[4],ymm1[5],ymm9[5]
2448 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
2449 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
2450 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
2451 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2452 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
2453 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
2454 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2455 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm6 = ymm0[1],ymm2[1],ymm0[3],ymm2[3]
2456 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm5[2],ymm7[2],ymm5[3],ymm7[3],ymm5[6],ymm7[6],ymm5[7],ymm7[7]
2457 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm8[0,1],ymm6[2,0],ymm8[4,5],ymm6[6,4]
2458 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
2459 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
2460 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm1[1],ymm9[3],ymm1[3]
2461 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
2462 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2463 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
2464 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3],ymm6[4,5,6,7]
2465 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2466 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[6],ymm0[6],ymm2[7],ymm0[7]
2467 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm7[3,0],ymm5[3,0],ymm7[7,4],ymm5[7,4]
2468 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm2[2,0],ymm0[2,3],ymm2[6,4],ymm0[6,7]
2469 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm1[2],ymm9[2],ymm1[3],ymm9[3],ymm1[6],ymm9[6],ymm1[7],ymm9[7]
2470 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %ymm0
2471 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
2472 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %ymm1
2473 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
2474 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,0,2,3,6,4,6,7]
2475 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
2476 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
2477 ; AVX1-ONLY-NEXT: vmovaps 96(%r10), %ymm2
2478 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm5[4,5,6,7]
2479 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2480 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %ymm5
2481 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm2[0],ymm5[2],ymm2[2]
2482 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
2483 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
2484 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %ymm3
2485 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %ymm4
2486 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
2487 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
2488 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %ymm6
2489 ; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %ymm9
2490 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
2491 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
2492 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2493 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
2494 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
2495 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2496 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm2[0],ymm5[0],ymm2[1],ymm5[1],ymm2[4],ymm5[4],ymm2[5],ymm5[5]
2497 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
2498 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
2499 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
2500 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
2501 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
2502 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
2503 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2504 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
2505 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
2506 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2507 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
2508 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
2509 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
2510 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
2511 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
2512 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
2513 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
2514 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2515 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
2516 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
2517 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2518 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm5[2],ymm2[3],ymm5[3],ymm2[6],ymm5[6],ymm2[7],ymm5[7]
2519 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
2520 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm2[2,3],ymm0[6,4],ymm2[6,7]
2521 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
2522 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
2523 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
2524 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
2525 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
2526 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
2527 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
2528 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2529 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm2
2530 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm3
2531 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2532 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm4
2533 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm0
2534 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm1
2535 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
2536 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm6[0,1,0,1]
2537 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
2538 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm4[0,1,2,3,4,5],ymm5[6,7]
2539 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm4
2540 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm5
2541 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2542 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm9
2543 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm10
2544 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
2545 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
2546 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
2547 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2548 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
2549 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
2550 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
2551 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
2552 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
2553 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm2[1],xmm8[2,3]
2554 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
2555 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
2556 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
2557 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2558 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2559 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2560 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm0[2,2,2,2]
2561 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm1[0,1,2],xmm3[3]
2562 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
2563 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm7
2564 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3,4,5],ymm3[6,7]
2565 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm4[2,2,2,2]
2566 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm5[0,1,2],xmm7[3]
2567 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
2568 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3],ymm3[4,5,6,7]
2569 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2570 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm3 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2571 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm6[1],xmm3[1]
2572 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2573 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2574 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm2[2,3,2,3]
2575 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2576 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
2577 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
2578 ; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
2579 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm4
2580 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm5
2581 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2582 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
2583 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm0
2584 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %xmm1
2585 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
2586 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
2587 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
2588 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
2589 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm2
2590 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm3
2591 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2592 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm9
2593 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm10
2594 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
2595 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
2596 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
2597 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2598 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
2599 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
2600 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
2601 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
2602 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
2603 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
2604 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
2605 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
2606 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
2607 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2608 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2609 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2610 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
2611 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
2612 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
2613 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
2614 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
2615 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
2616 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
2617 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
2618 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
2619 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2620 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2621 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
2622 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2623 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2624 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
2625 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2626 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
2627 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
2628 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2629 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm4
2630 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm5
2631 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2632 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
2633 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm9
2634 ; AVX1-ONLY-NEXT: vmovaps 64(%r10), %xmm1
2635 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm9[0],xmm1[1],xmm9[1]
2636 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
2637 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
2638 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm2[0,1,2,3,4,5],ymm3[6,7]
2639 ; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm2
2640 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm3
2641 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2642 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm0
2643 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm7
2644 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm15 = xmm7[0],xmm0[0],xmm7[1],xmm0[1]
2645 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm15 = xmm15[0],xmm8[0]
2646 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm15[0,1,2,3],ymm10[4,5,6,7]
2647 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm7[1,1,1,1]
2648 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm0[1],xmm15[2,3]
2649 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm15[0,1],xmm8[2,3]
2650 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
2651 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm5[1,1,1,1]
2652 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm4[1],xmm15[2,3]
2653 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
2654 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm15[0,1,2,3,4,5],ymm6[6,7]
2655 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm8[0,1,2,3],ymm6[4,5,6,7]
2656 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm7[2],xmm0[2],xmm7[3],xmm0[3]
2657 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2658 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm9[2,2,2,2]
2659 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
2660 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
2661 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm6
2662 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
2663 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
2664 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
2665 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm0[0,1],xmm6[2,3]
2666 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm6[0,1,2,3],ymm5[4,5,6,7]
2667 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2668 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
2669 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm1[2],xmm9[2],xmm1[3],xmm9[3]
2670 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2671 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm4[2,3,2,3]
2672 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
2673 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
2674 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2675 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm3
2676 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm9
2677 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm9[0],xmm3[0],xmm9[1],xmm3[1]
2678 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2679 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm7
2680 ; AVX1-ONLY-NEXT: vmovaps 96(%r10), %xmm6
2681 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm15 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
2682 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm15[0,1,0,1]
2683 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
2684 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3,4,5],ymm2[6,7]
2685 ; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm5
2686 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm4
2687 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
2688 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm1
2689 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm0
2690 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm14 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2691 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm14 = xmm14[0],xmm2[0]
2692 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
2693 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm0[1,1,1,1]
2694 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm14 = xmm14[0],xmm1[1],xmm14[2,3]
2695 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm14[0,1],xmm2[2,3]
2696 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm14
2697 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm9[1,1,1,1]
2698 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm3[1],xmm15[2,3]
2699 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
2700 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm15[0,1,2,3,4,5],ymm14[6,7]
2701 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
2702 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
2703 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm9[2],xmm3[2],xmm9[3],xmm3[3]
2704 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm7[2,2,2,2]
2705 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm6[0,1,2],xmm3[3]
2706 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
2707 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm9
2708 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm9[0,1,2,3,4,5],ymm3[6,7]
2709 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm9 = xmm5[2,2,2,2]
2710 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm4[0,1,2],xmm9[3]
2711 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm0[0,1],xmm9[2,3]
2712 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm9[0,1,2,3],ymm3[4,5,6,7]
2713 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm4[2],xmm5[2],xmm4[3],xmm5[3]
2714 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
2715 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2716 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
2717 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
2718 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2719 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
2720 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2721 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2722 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
2723 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 832(%rax)
2724 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 800(%rax)
2725 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 768(%rax)
2726 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 608(%rax)
2727 ; AVX1-ONLY-NEXT: vmovaps %ymm11, 576(%rax)
2728 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 544(%rax)
2729 ; AVX1-ONLY-NEXT: vmovaps %ymm13, 512(%rax)
2730 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2731 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
2732 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2733 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
2734 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2735 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
2736 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2737 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
2738 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2739 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
2740 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2741 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
2742 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2743 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
2744 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2745 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
2746 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2747 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
2748 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2749 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
2750 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2751 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
2752 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2753 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
2754 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2755 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
2756 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2757 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
2758 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2759 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
2760 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2761 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
2762 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2763 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
2764 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2765 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
2766 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2767 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
2768 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2769 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
2770 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2771 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
2772 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2773 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
2774 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2775 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
2776 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2777 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
2778 ; AVX1-ONLY-NEXT: addq $648, %rsp # imm = 0x288
2779 ; AVX1-ONLY-NEXT: vzeroupper
2780 ; AVX1-ONLY-NEXT: retq
2782 ; AVX2-ONLY-LABEL: store_i32_stride8_vf32:
2783 ; AVX2-ONLY: # %bb.0:
2784 ; AVX2-ONLY-NEXT: subq $648, %rsp # imm = 0x288
2785 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
2786 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2787 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
2788 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm4
2789 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
2790 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm5
2791 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
2792 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm7
2793 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %ymm3
2794 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm9
2795 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm10
2796 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm12
2797 ; AVX2-ONLY-NEXT: vmovaps (%rax), %ymm8
2798 ; AVX2-ONLY-NEXT: vmovaps (%r10), %ymm11
2799 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm8[0],ymm11[0],ymm8[1],ymm11[1],ymm8[4],ymm11[4],ymm8[5],ymm11[5]
2800 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm13 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[4],ymm5[4],ymm4[5],ymm5[5]
2801 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
2802 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm14 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[4],ymm9[4],ymm7[5],ymm9[5]
2803 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm15 = ymm14[2,2,2,2]
2804 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm13[0,1],xmm15[2,3]
2805 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm15 = ymm10[0],ymm12[0],ymm10[1],ymm12[1],ymm10[4],ymm12[4],ymm10[5],ymm12[5]
2806 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm15 = ymm15[0],ymm6[0],ymm15[2],ymm6[2]
2807 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm15[4,5,6,7]
2808 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2809 ; AVX2-ONLY-NEXT: vbroadcastss 20(%r8), %ymm13
2810 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm12[5],ymm13[6,7]
2811 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm6[6,7]
2812 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm14, %xmm6
2813 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm14 = ymm4[1,1,1,1,5,5,5,5]
2814 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0],ymm5[1],ymm14[2,3,4],ymm5[5],ymm14[6,7]
2815 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm14, %xmm14
2816 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm14 = xmm14[0,1],xmm6[2,3]
2817 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm6
2818 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1,2,3],ymm13[4,5,6,7]
2819 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2820 ; AVX2-ONLY-NEXT: vbroadcastss 24(%r10), %ymm13
2821 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm8[0,1,2,3,4,5,6],ymm13[7]
2822 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm8
2823 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm12 = ymm10[2],ymm12[2],ymm10[3],ymm12[3],ymm10[6],ymm12[6],ymm10[7],ymm12[7]
2824 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %ymm10
2825 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[6],ymm5[6],ymm4[7],ymm5[7]
2826 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm5
2827 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm9 = ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[6],ymm9[6],ymm7[7],ymm9[7]
2828 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm7 = ymm9[2,2,2,2]
2829 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],xmm7[2,3]
2830 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %ymm7
2831 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm12[0,1,2,3,4,5],ymm13[6,7]
2832 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm13[4,5,6,7]
2833 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2834 ; AVX2-ONLY-NEXT: vbroadcastss 28(%rax), %ymm5
2835 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm5[2],ymm11[2],ymm5[3],ymm11[3],ymm5[6],ymm11[6],ymm5[7],ymm11[7]
2836 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm12[1],ymm5[1],ymm12[3],ymm5[3]
2837 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm9
2838 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,3,2,3,6,7,6,7]
2839 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
2840 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm9[2,3]
2841 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
2842 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2843 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm10[0],ymm7[0],ymm10[1],ymm7[1],ymm10[4],ymm7[4],ymm10[5],ymm7[5]
2844 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
2845 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2846 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm9 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
2847 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm11 = ymm9[2,2,2,2]
2848 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],xmm11[2,3]
2849 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm8[0],ymm6[1],ymm8[1],ymm6[4],ymm8[4],ymm6[5],ymm8[5]
2850 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm4[0],ymm11[2],ymm4[2]
2851 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm11[4,5,6,7]
2852 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2853 ; AVX2-ONLY-NEXT: vbroadcastss 52(%r8), %ymm5
2854 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4],ymm8[5],ymm5[6,7]
2855 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm4[6,7]
2856 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm4
2857 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm9 = ymm0[1,1,1,1,5,5,5,5]
2858 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0],ymm1[1],ymm9[2,3,4],ymm1[5],ymm9[6,7]
2859 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm9
2860 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm9[0,1],xmm4[2,3]
2861 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm4
2862 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
2863 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2864 ; AVX2-ONLY-NEXT: vbroadcastss 56(%r10), %ymm5
2865 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5,6],ymm5[7]
2866 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm5
2867 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm6[2],ymm8[2],ymm6[3],ymm8[3],ymm6[6],ymm8[6],ymm6[7],ymm8[7]
2868 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm6
2869 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
2870 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm0
2871 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
2872 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm2[2,2,2,2]
2873 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0,1],xmm1[2,3]
2874 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %ymm0
2875 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm9[6,7]
2876 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
2877 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2878 ; AVX2-ONLY-NEXT: vbroadcastss 60(%rax), %ymm1
2879 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm1[2],ymm7[2],ymm1[3],ymm7[3],ymm1[6],ymm7[6],ymm1[7],ymm7[7]
2880 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm1
2881 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm3[1],ymm8[3],ymm3[3]
2882 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
2883 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm10[2,3,2,3,6,7,6,7]
2884 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm7
2885 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm7[0,1],xmm2[2,3]
2886 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2887 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2888 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[4],ymm5[4],ymm4[5],ymm5[5]
2889 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
2890 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm6[0],ymm0[0],ymm6[1],ymm0[1],ymm6[4],ymm0[4],ymm6[5],ymm0[5]
2891 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm7 = ymm3[2,2,2,2]
2892 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm7[2,3]
2893 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm7
2894 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %ymm8
2895 ; AVX2-ONLY-NEXT: vmovaps 64(%r10), %ymm9
2896 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
2897 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm1[0],ymm7[0],ymm1[1],ymm7[1],ymm1[4],ymm7[4],ymm1[5],ymm7[5]
2898 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
2899 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
2900 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2901 ; AVX2-ONLY-NEXT: vbroadcastss 84(%r8), %ymm2
2902 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
2903 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
2904 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
2905 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,1,1,1,5,5,5,5]
2906 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm5[1],ymm10[2,3,4],ymm5[5],ymm10[6,7]
2907 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2908 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm10[0,1],xmm3[2,3]
2909 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
2910 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2911 ; AVX2-ONLY-NEXT: vbroadcastss 88(%r10), %ymm2
2912 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
2913 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm1[2],ymm7[2],ymm1[3],ymm7[3],ymm1[6],ymm7[6],ymm1[7],ymm7[7]
2914 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[6],ymm5[6],ymm4[7],ymm5[7]
2915 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm6[2],ymm0[2],ymm6[3],ymm0[3],ymm6[6],ymm0[6],ymm6[7],ymm0[7]
2916 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm0
2917 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm4[2,2,2,2]
2918 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm5[2,3]
2919 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],ymm2[6,7]
2920 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2921 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2922 ; AVX2-ONLY-NEXT: vbroadcastss 92(%rax), %ymm0
2923 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm0[2],ymm9[2],ymm0[3],ymm9[3],ymm0[6],ymm9[6],ymm0[7],ymm9[7]
2924 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
2925 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
2926 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
2927 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm2
2928 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,3,2,3,6,7,6,7]
2929 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
2930 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
2931 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm2
2932 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm5[4,5,6,7]
2933 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2934 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
2935 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
2936 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %ymm4
2937 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
2938 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
2939 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm6[2,3]
2940 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm6
2941 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm7
2942 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %ymm8
2943 ; AVX2-ONLY-NEXT: vmovaps 96(%r10), %ymm9
2944 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
2945 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
2946 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
2947 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm11[4,5,6,7]
2948 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2949 ; AVX2-ONLY-NEXT: vbroadcastss 116(%r8), %ymm3
2950 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm7[5],ymm3[6,7]
2951 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm10[6,7]
2952 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
2953 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
2954 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
2955 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
2956 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
2957 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3],ymm3[4,5,6,7]
2958 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2959 ; AVX2-ONLY-NEXT: vbroadcastss 120(%r10), %ymm3
2960 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5,6],ymm3[7]
2961 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
2962 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
2963 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
2964 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm2
2965 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
2966 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,3]
2967 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3,4,5],ymm3[6,7]
2968 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
2969 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2970 ; AVX2-ONLY-NEXT: vbroadcastss 124(%rax), %ymm2
2971 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
2972 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
2973 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
2974 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
2975 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
2976 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
2977 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
2978 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2979 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm0
2980 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
2981 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm1
2982 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
2983 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2984 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm4
2985 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm5
2986 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
2987 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
2988 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm2
2989 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
2990 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm3
2991 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
2992 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
2993 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm8
2994 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm9
2995 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
2996 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
2997 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
2998 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
2999 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
3000 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3001 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
3002 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
3003 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3004 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
3005 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
3006 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
3007 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
3008 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
3009 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
3010 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
3011 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
3012 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3013 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3014 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
3015 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
3016 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
3017 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
3018 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
3019 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
3020 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
3021 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
3022 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
3023 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
3024 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3025 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3026 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
3027 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3028 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3029 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
3030 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3031 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
3032 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3033 ; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
3034 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm0
3035 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
3036 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
3037 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
3038 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
3039 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
3040 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
3041 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
3042 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
3043 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %xmm2
3044 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm3
3045 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
3046 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
3047 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
3048 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm8
3049 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm9
3050 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
3051 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
3052 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
3053 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
3054 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
3055 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3056 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
3057 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
3058 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3059 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
3060 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
3061 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
3062 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
3063 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
3064 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
3065 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
3066 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
3067 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3068 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3069 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
3070 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
3071 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
3072 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
3073 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
3074 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
3075 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
3076 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
3077 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
3078 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
3079 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3080 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3081 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
3082 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3083 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3084 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
3085 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3086 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
3087 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3088 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3089 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm3
3090 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %xmm1
3091 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm0
3092 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm2
3093 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
3094 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm4
3095 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm5
3096 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
3097 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm2[0,1],xmm0[2,3]
3098 ; AVX2-ONLY-NEXT: vmovaps 64(%r10), %xmm2
3099 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm0
3100 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
3101 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm8
3102 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
3103 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm8
3104 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm6
3105 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm6[0],xmm8[0],xmm6[1],xmm8[1]
3106 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
3107 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
3108 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
3109 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm7[4,5,6,7]
3110 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm5[1,1,1,1]
3111 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm4[1],xmm7[2,3]
3112 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
3113 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm9[2,3]
3114 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
3115 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm9
3116 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm6[1,1,1,1]
3117 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm8[1],xmm15[2,3]
3118 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
3119 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm15[0,1,2,3,4,5],ymm9[6,7]
3120 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm7[0,1,2,3],ymm9[4,5,6,7]
3121 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3122 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm6[2],xmm8[2],xmm6[3],xmm8[3]
3123 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
3124 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm0[0,1,2],xmm6[3]
3125 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
3126 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
3127 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
3128 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm3[2,2,2,2]
3129 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
3130 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
3131 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm7[0,1,2,3],ymm6[4,5,6,7]
3132 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
3133 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm1 = xmm4[1],xmm1[1]
3134 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm2[2],xmm0[3],xmm2[3]
3135 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3136 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
3137 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3138 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3139 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3140 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm6
3141 ; AVX2-ONLY-NEXT: vbroadcastss %xmm6, %xmm0
3142 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm5
3143 ; AVX2-ONLY-NEXT: vbroadcastss %xmm5, %xmm1
3144 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3145 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm4
3146 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm2
3147 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
3148 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1],xmm0[2,3]
3149 ; AVX2-ONLY-NEXT: vmovaps 96(%r10), %xmm3
3150 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm15
3151 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm1
3152 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm14
3153 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1]
3154 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm15
3155 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm0
3156 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
3157 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm13
3158 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm14
3159 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
3160 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm7[0,1,2,3],ymm13[4,5,6,7]
3161 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[1,1,1,1]
3162 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm4[1],xmm7[2,3]
3163 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
3164 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm13[2,3]
3165 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
3166 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm13
3167 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm12 = xmm0[1,1,1,1]
3168 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0],xmm15[1],xmm12[2,3]
3169 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm12
3170 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],ymm13[6,7]
3171 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm12[4,5,6,7]
3172 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3173 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
3174 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm3[2,2,2,2]
3175 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm1[0,1,2],xmm4[3]
3176 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
3177 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm12
3178 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm12[0,1,2,3,4,5],ymm4[6,7]
3179 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm12 = xmm6[2,2,2,2]
3180 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm5[0,1,2],xmm12[3]
3181 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm2[0,1],xmm12[2,3]
3182 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm4[4,5,6,7]
3183 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm5[2],xmm6[2],xmm5[3],xmm6[3]
3184 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
3185 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
3186 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
3187 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
3188 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3189 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
3190 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
3191 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3192 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
3193 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 832(%rax)
3194 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 800(%rax)
3195 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 768(%rax)
3196 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 608(%rax)
3197 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 576(%rax)
3198 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 544(%rax)
3199 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 512(%rax)
3200 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3201 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
3202 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3203 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
3204 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3205 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
3206 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3207 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
3208 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
3209 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
3210 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3211 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
3212 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3213 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
3214 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3215 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
3216 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3217 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
3218 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3219 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
3220 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3221 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
3222 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3223 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
3224 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3225 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
3226 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3227 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
3228 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3229 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
3230 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3231 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
3232 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3233 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
3234 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3235 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
3236 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3237 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
3238 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3239 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
3240 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3241 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
3242 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3243 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
3244 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3245 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
3246 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3247 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
3248 ; AVX2-ONLY-NEXT: addq $648, %rsp # imm = 0x288
3249 ; AVX2-ONLY-NEXT: vzeroupper
3250 ; AVX2-ONLY-NEXT: retq
3252 ; AVX512F-LABEL: store_i32_stride8_vf32:
3254 ; AVX512F-NEXT: subq $2056, %rsp # imm = 0x808
3255 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
3256 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
3257 ; AVX512F-NEXT: vmovdqa64 (%r8), %zmm1
3258 ; AVX512F-NEXT: vmovdqa64 (%r9), %zmm27
3259 ; AVX512F-NEXT: vmovdqa64 (%r10), %zmm2
3260 ; AVX512F-NEXT: vmovdqa64 64(%r10), %zmm0
3261 ; AVX512F-NEXT: vmovdqa64 (%rax), %zmm30
3262 ; AVX512F-NEXT: vmovdqa64 64(%rax), %zmm28
3263 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
3264 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm5
3265 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm4
3266 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm3, %zmm5
3267 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3268 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
3269 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
3270 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm2, %zmm5
3271 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3272 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
3273 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm6
3274 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm5, %zmm6
3275 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3276 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
3277 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm7
3278 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm6, %zmm7
3279 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3280 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
3281 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm8
3282 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm7, %zmm8
3283 ; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3284 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
3285 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm9
3286 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm8, %zmm9
3287 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3288 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
3289 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm10
3290 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm9, %zmm10
3291 ; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3292 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
3293 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm11
3294 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm10, %zmm11
3295 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3296 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
3297 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm12
3298 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm11, %zmm12
3299 ; AVX512F-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3300 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
3301 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm13
3302 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm12, %zmm13
3303 ; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3304 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
3305 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm14
3306 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm13, %zmm14
3307 ; AVX512F-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3308 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm14 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
3309 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm15 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
3310 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm16
3311 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm15, %zmm16
3312 ; AVX512F-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3313 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm31 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
3314 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm31, %zmm4
3315 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3316 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4
3317 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm14, %zmm4
3318 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3319 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm3
3320 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3321 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm5
3322 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3323 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm7
3324 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3325 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm9
3326 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3327 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm11
3328 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3329 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm13
3330 ; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3331 ; AVX512F-NEXT: vpermi2d %zmm28, %zmm0, %zmm15
3332 ; AVX512F-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3333 ; AVX512F-NEXT: vpermt2d %zmm28, %zmm31, %zmm0
3334 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3335 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
3336 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm31
3337 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm28, %zmm31
3338 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm0 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
3339 ; AVX512F-NEXT: vpermt2d %zmm27, %zmm0, %zmm1
3340 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3341 ; AVX512F-NEXT: vmovdqa64 64(%r8), %zmm27
3342 ; AVX512F-NEXT: vmovdqa64 64(%r9), %zmm1
3343 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm2
3344 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3345 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm6
3346 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3347 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm8
3348 ; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3349 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm10
3350 ; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3351 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm12
3352 ; AVX512F-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3353 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm14
3354 ; AVX512F-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3355 ; AVX512F-NEXT: vpermi2d %zmm1, %zmm27, %zmm28
3356 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm0, %zmm27
3357 ; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm16
3358 ; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm0
3359 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
3360 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm20
3361 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm20
3362 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm2
3363 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
3364 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm21
3365 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm21
3366 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm3
3367 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
3368 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm22
3369 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm22
3370 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4
3371 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
3372 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm23
3373 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm23
3374 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm6
3375 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
3376 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm24
3377 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm24
3378 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm7
3379 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm29 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
3380 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm25
3381 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm29, %zmm25
3382 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm30 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
3383 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm26
3384 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm30, %zmm26
3385 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
3386 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm16
3387 ; AVX512F-NEXT: vmovdqa64 64(%rdx), %zmm5
3388 ; AVX512F-NEXT: vmovdqa64 64(%rcx), %zmm0
3389 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm2
3390 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3391 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm3
3392 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3393 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm4
3394 ; AVX512F-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
3395 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm6
3396 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3397 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm7
3398 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3399 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm29
3400 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm5, %zmm30
3401 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
3402 ; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm3
3403 ; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm0
3404 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
3405 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm18
3406 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm12, %zmm18
3407 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
3408 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm17
3409 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm17
3410 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
3411 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm15
3412 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm8, %zmm15
3413 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
3414 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm14
3415 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm14
3416 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
3417 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm13
3418 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm6, %zmm13
3419 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
3420 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm11
3421 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
3422 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
3423 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm9
3424 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
3425 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm19 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
3426 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm19, %zmm3
3427 ; AVX512F-NEXT: vmovdqa64 64(%rdi), %zmm1
3428 ; AVX512F-NEXT: vmovdqa64 64(%rsi), %zmm0
3429 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm12
3430 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm10
3431 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm8
3432 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm7
3433 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm6
3434 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm4
3435 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm2
3436 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm19, %zmm1
3437 ; AVX512F-NEXT: movb $-120, %al
3438 ; AVX512F-NEXT: kmovw %eax, %k1
3439 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3440 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3441 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm0 {%k1}
3442 ; AVX512F-NEXT: movb $34, %al
3443 ; AVX512F-NEXT: kmovw %eax, %k2
3444 ; AVX512F-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3445 ; AVX512F-NEXT: movb $-52, %al
3446 ; AVX512F-NEXT: kmovw %eax, %k3
3447 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm18 {%k3}
3448 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3449 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3450 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3451 ; AVX512F-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
3452 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm17 {%k3}
3453 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3454 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3455 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3456 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
3457 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm15 {%k3}
3458 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3459 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3460 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3461 ; AVX512F-NEXT: vmovdqa64 %zmm23, %zmm14 {%k2}
3462 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm14 {%k3}
3463 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3464 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3465 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3466 ; AVX512F-NEXT: vmovdqa64 %zmm24, %zmm13 {%k2}
3467 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm13 {%k3}
3468 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3469 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3470 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3471 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm11 {%k2}
3472 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm11 {%k3}
3473 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3474 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm31 {%k1}
3475 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm9 {%k2}
3476 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm9 {%k3}
3477 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3478 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3479 ; AVX512F-NEXT: vmovdqa64 %zmm19, %zmm0 {%k1}
3480 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm3 {%k2}
3481 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm3 {%k3}
3482 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3483 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3484 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3485 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3486 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm12 {%k2}
3487 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm12 {%k3}
3488 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3489 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3490 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3491 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3492 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm10 {%k2}
3493 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm10 {%k3}
3494 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3495 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3496 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3497 ; AVX512F-NEXT: vmovdqu64 (%rsp), %zmm16 # 64-byte Reload
3498 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm8 {%k2}
3499 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k3}
3500 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3501 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3502 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3503 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3504 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm7 {%k2}
3505 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm7 {%k3}
3506 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3507 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3508 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3509 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3510 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm6 {%k2}
3511 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k3}
3512 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3513 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3514 ; AVX512F-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3515 ; AVX512F-NEXT: vmovdqa64 %zmm29, %zmm4 {%k2}
3516 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm4 {%k3}
3517 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3518 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm28 {%k1}
3519 ; AVX512F-NEXT: vmovdqa64 %zmm30, %zmm2 {%k2}
3520 ; AVX512F-NEXT: vmovdqa64 %zmm28, %zmm2 {%k3}
3521 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3522 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
3523 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1 {%k2}
3524 ; AVX512F-NEXT: vmovdqa64 %zmm27, %zmm1 {%k3}
3525 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
3526 ; AVX512F-NEXT: vmovdqa64 %zmm1, 896(%rax)
3527 ; AVX512F-NEXT: vmovdqa64 %zmm2, 960(%rax)
3528 ; AVX512F-NEXT: vmovdqa64 %zmm4, 768(%rax)
3529 ; AVX512F-NEXT: vmovdqa64 %zmm6, 832(%rax)
3530 ; AVX512F-NEXT: vmovdqa64 %zmm7, 640(%rax)
3531 ; AVX512F-NEXT: vmovdqa64 %zmm8, 704(%rax)
3532 ; AVX512F-NEXT: vmovdqa64 %zmm10, 512(%rax)
3533 ; AVX512F-NEXT: vmovdqa64 %zmm12, 576(%rax)
3534 ; AVX512F-NEXT: vmovdqa64 %zmm3, 384(%rax)
3535 ; AVX512F-NEXT: vmovdqa64 %zmm9, 448(%rax)
3536 ; AVX512F-NEXT: vmovdqa64 %zmm11, 256(%rax)
3537 ; AVX512F-NEXT: vmovdqa64 %zmm13, 320(%rax)
3538 ; AVX512F-NEXT: vmovdqa64 %zmm14, 128(%rax)
3539 ; AVX512F-NEXT: vmovdqa64 %zmm15, 192(%rax)
3540 ; AVX512F-NEXT: vmovdqa64 %zmm17, (%rax)
3541 ; AVX512F-NEXT: vmovdqa64 %zmm18, 64(%rax)
3542 ; AVX512F-NEXT: addq $2056, %rsp # imm = 0x808
3543 ; AVX512F-NEXT: vzeroupper
3544 ; AVX512F-NEXT: retq
3546 ; AVX512BW-LABEL: store_i32_stride8_vf32:
3547 ; AVX512BW: # %bb.0:
3548 ; AVX512BW-NEXT: subq $2056, %rsp # imm = 0x808
3549 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3550 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3551 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
3552 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm27
3553 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm2
3554 ; AVX512BW-NEXT: vmovdqa64 64(%r10), %zmm0
3555 ; AVX512BW-NEXT: vmovdqa64 (%rax), %zmm30
3556 ; AVX512BW-NEXT: vmovdqa64 64(%rax), %zmm28
3557 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
3558 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm5
3559 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm4
3560 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm3, %zmm5
3561 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3562 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
3563 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
3564 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm2, %zmm5
3565 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3566 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
3567 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm6
3568 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm5, %zmm6
3569 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3570 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
3571 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
3572 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm6, %zmm7
3573 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3574 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
3575 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
3576 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm7, %zmm8
3577 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3578 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
3579 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9
3580 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm8, %zmm9
3581 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3582 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
3583 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm10
3584 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm9, %zmm10
3585 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3586 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
3587 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm11
3588 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm10, %zmm11
3589 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3590 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
3591 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm12
3592 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm11, %zmm12
3593 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3594 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
3595 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
3596 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm12, %zmm13
3597 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3598 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
3599 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm14
3600 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm13, %zmm14
3601 ; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3602 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
3603 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
3604 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm16
3605 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm15, %zmm16
3606 ; AVX512BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3607 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm31 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
3608 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm31, %zmm4
3609 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3610 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
3611 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm14, %zmm4
3612 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3613 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm3
3614 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3615 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm5
3616 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3617 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm7
3618 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3619 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm9
3620 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3621 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm11
3622 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3623 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm13
3624 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3625 ; AVX512BW-NEXT: vpermi2d %zmm28, %zmm0, %zmm15
3626 ; AVX512BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3627 ; AVX512BW-NEXT: vpermt2d %zmm28, %zmm31, %zmm0
3628 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3629 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
3630 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm31
3631 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm28, %zmm31
3632 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
3633 ; AVX512BW-NEXT: vpermt2d %zmm27, %zmm0, %zmm1
3634 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3635 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm27
3636 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm1
3637 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm2
3638 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3639 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm6
3640 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3641 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm8
3642 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3643 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm10
3644 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3645 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm12
3646 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3647 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm14
3648 ; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3649 ; AVX512BW-NEXT: vpermi2d %zmm1, %zmm27, %zmm28
3650 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm0, %zmm27
3651 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm16
3652 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm0
3653 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
3654 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm20
3655 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm20
3656 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
3657 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
3658 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm21
3659 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm21
3660 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
3661 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
3662 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm22
3663 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm22
3664 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
3665 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
3666 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm23
3667 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm23
3668 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm6
3669 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
3670 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm24
3671 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm24
3672 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
3673 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
3674 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm25
3675 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm29, %zmm25
3676 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
3677 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm26
3678 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm30, %zmm26
3679 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
3680 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm16
3681 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm5
3682 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm0
3683 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm2
3684 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3685 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm3
3686 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3687 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm4
3688 ; AVX512BW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
3689 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm6
3690 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3691 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm7
3692 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3693 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm29
3694 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm5, %zmm30
3695 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
3696 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm3
3697 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm0
3698 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
3699 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm18
3700 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm12, %zmm18
3701 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
3702 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm17
3703 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm17
3704 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
3705 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm15
3706 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm8, %zmm15
3707 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
3708 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm14
3709 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm14
3710 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
3711 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm13
3712 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm6, %zmm13
3713 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
3714 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm11
3715 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
3716 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
3717 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm9
3718 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
3719 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
3720 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm19, %zmm3
3721 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
3722 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm0
3723 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm12
3724 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm10
3725 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm8
3726 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm7
3727 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm6
3728 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm4
3729 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm2
3730 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm19, %zmm1
3731 ; AVX512BW-NEXT: movb $-120, %al
3732 ; AVX512BW-NEXT: kmovd %eax, %k1
3733 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3734 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3735 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm0 {%k1}
3736 ; AVX512BW-NEXT: movb $34, %al
3737 ; AVX512BW-NEXT: kmovd %eax, %k2
3738 ; AVX512BW-NEXT: vmovdqa64 %zmm20, %zmm18 {%k2}
3739 ; AVX512BW-NEXT: movb $-52, %al
3740 ; AVX512BW-NEXT: kmovd %eax, %k3
3741 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k3}
3742 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3743 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3744 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3745 ; AVX512BW-NEXT: vmovdqa64 %zmm21, %zmm17 {%k2}
3746 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm17 {%k3}
3747 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3748 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3749 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3750 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k2}
3751 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k3}
3752 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3753 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3754 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3755 ; AVX512BW-NEXT: vmovdqa64 %zmm23, %zmm14 {%k2}
3756 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k3}
3757 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3758 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3759 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3760 ; AVX512BW-NEXT: vmovdqa64 %zmm24, %zmm13 {%k2}
3761 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm13 {%k3}
3762 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3763 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3764 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k1}
3765 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm11 {%k2}
3766 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm11 {%k3}
3767 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3768 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k1}
3769 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm9 {%k2}
3770 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm9 {%k3}
3771 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3772 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
3773 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm0 {%k1}
3774 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm3 {%k2}
3775 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k3}
3776 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3777 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3778 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3779 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3780 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm12 {%k2}
3781 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k3}
3782 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3783 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3784 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3785 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3786 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm10 {%k2}
3787 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k3}
3788 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3789 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3790 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3791 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm16 # 64-byte Reload
3792 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm8 {%k2}
3793 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k3}
3794 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3795 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3796 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3797 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3798 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm7 {%k2}
3799 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k3}
3800 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3801 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3802 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3803 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3804 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm6 {%k2}
3805 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k3}
3806 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3807 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
3808 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm0 {%k1}
3809 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm4 {%k2}
3810 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k3}
3811 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3812 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm28 {%k1}
3813 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm2 {%k2}
3814 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k3}
3815 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3816 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
3817 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1 {%k2}
3818 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm1 {%k3}
3819 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3820 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 896(%rax)
3821 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 960(%rax)
3822 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 768(%rax)
3823 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 832(%rax)
3824 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 640(%rax)
3825 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 704(%rax)
3826 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 512(%rax)
3827 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 576(%rax)
3828 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 384(%rax)
3829 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 448(%rax)
3830 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 256(%rax)
3831 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 320(%rax)
3832 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 128(%rax)
3833 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 192(%rax)
3834 ; AVX512BW-NEXT: vmovdqa64 %zmm17, (%rax)
3835 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%rax)
3836 ; AVX512BW-NEXT: addq $2056, %rsp # imm = 0x808
3837 ; AVX512BW-NEXT: vzeroupper
3838 ; AVX512BW-NEXT: retq
3839 %in.vec0 = load <32 x i32>, ptr %in.vecptr0, align 64
3840 %in.vec1 = load <32 x i32>, ptr %in.vecptr1, align 64
3841 %in.vec2 = load <32 x i32>, ptr %in.vecptr2, align 64
3842 %in.vec3 = load <32 x i32>, ptr %in.vecptr3, align 64
3843 %in.vec4 = load <32 x i32>, ptr %in.vecptr4, align 64
3844 %in.vec5 = load <32 x i32>, ptr %in.vecptr5, align 64
3845 %in.vec6 = load <32 x i32>, ptr %in.vecptr6, align 64
3846 %in.vec7 = load <32 x i32>, ptr %in.vecptr7, align 64
3847 %1 = shufflevector <32 x i32> %in.vec0, <32 x i32> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3848 %2 = shufflevector <32 x i32> %in.vec2, <32 x i32> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3849 %3 = shufflevector <32 x i32> %in.vec4, <32 x i32> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3850 %4 = shufflevector <32 x i32> %in.vec6, <32 x i32> %in.vec7, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3851 %5 = shufflevector <64 x i32> %1, <64 x i32> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3852 %6 = shufflevector <64 x i32> %3, <64 x i32> %4, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3853 %7 = shufflevector <128 x i32> %5, <128 x i32> %6, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
3854 %interleaved.vec = shufflevector <256 x i32> %7, <256 x i32> poison, <256 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 224, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 225, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 226, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 227, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 228, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 229, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 230, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 231, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 232, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 233, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 234, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 235, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 236, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 237, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 238, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 239, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 240, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 241, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 242, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 243, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 244, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 245, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 246, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 247, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 248, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 249, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 250, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 251, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 252, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 253, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 254, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223, i32 255>
3855 store <256 x i32> %interleaved.vec, ptr %out.vec, align 64
3859 define void @store_i32_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
3860 ; SSE-LABEL: store_i32_stride8_vf64:
3862 ; SSE-NEXT: subq $1752, %rsp # imm = 0x6D8
3863 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3864 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
3865 ; SSE-NEXT: movaps (%rdi), %xmm9
3866 ; SSE-NEXT: movaps 16(%rdi), %xmm10
3867 ; SSE-NEXT: movaps (%rsi), %xmm2
3868 ; SSE-NEXT: movaps 16(%rsi), %xmm0
3869 ; SSE-NEXT: movaps (%rdx), %xmm3
3870 ; SSE-NEXT: movaps 16(%rdx), %xmm1
3871 ; SSE-NEXT: movaps (%rcx), %xmm4
3872 ; SSE-NEXT: movaps (%r8), %xmm11
3873 ; SSE-NEXT: movaps (%r9), %xmm5
3874 ; SSE-NEXT: movaps (%r10), %xmm6
3875 ; SSE-NEXT: movaps (%rax), %xmm7
3876 ; SSE-NEXT: movaps %xmm4, %xmm8
3877 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm3[0]
3878 ; SSE-NEXT: movaps %xmm9, %xmm13
3879 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm2[0],xmm13[1],xmm2[1]
3880 ; SSE-NEXT: movaps %xmm13, %xmm12
3881 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
3882 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3883 ; SSE-NEXT: movaps %xmm7, %xmm8
3884 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm6[0]
3885 ; SSE-NEXT: movaps %xmm11, %xmm14
3886 ; SSE-NEXT: unpcklps {{.*#+}} xmm14 = xmm14[0],xmm5[0],xmm14[1],xmm5[1]
3887 ; SSE-NEXT: movaps %xmm14, %xmm12
3888 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,0]
3889 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3890 ; SSE-NEXT: movaps %xmm4, %xmm8
3891 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm3[1,1]
3892 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm8[2,0]
3893 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3894 ; SSE-NEXT: movaps %xmm7, %xmm8
3895 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm6[1,1]
3896 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,3],xmm8[2,0]
3897 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3898 ; SSE-NEXT: movaps %xmm3, %xmm8
3899 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1],xmm4[1]
3900 ; SSE-NEXT: unpckhps {{.*#+}} xmm9 = xmm9[2],xmm2[2],xmm9[3],xmm2[3]
3901 ; SSE-NEXT: movaps %xmm9, %xmm2
3902 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm8[0,2]
3903 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3904 ; SSE-NEXT: movaps %xmm6, %xmm2
3905 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm7[1]
3906 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm5[2],xmm11[3],xmm5[3]
3907 ; SSE-NEXT: movaps %xmm11, %xmm5
3908 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm2[0,2]
3909 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3910 ; SSE-NEXT: movaps 16(%rcx), %xmm5
3911 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,3],xmm4[3,3]
3912 ; SSE-NEXT: movaps 16(%r10), %xmm2
3913 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm3[0,2]
3914 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3915 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[3,3],xmm7[3,3]
3916 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm6[0,2]
3917 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3918 ; SSE-NEXT: movaps %xmm5, %xmm3
3919 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm1[0]
3920 ; SSE-NEXT: movaps %xmm10, %xmm6
3921 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1]
3922 ; SSE-NEXT: movaps %xmm6, %xmm4
3923 ; SSE-NEXT: movaps %xmm6, %xmm7
3924 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[2,0]
3925 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3926 ; SSE-NEXT: movaps 16(%rax), %xmm3
3927 ; SSE-NEXT: movaps %xmm3, %xmm4
3928 ; SSE-NEXT: movlhps {{.*#+}} xmm4 = xmm4[0],xmm2[0]
3929 ; SSE-NEXT: movaps 16(%r8), %xmm11
3930 ; SSE-NEXT: movaps 16(%r9), %xmm6
3931 ; SSE-NEXT: movaps %xmm11, %xmm9
3932 ; SSE-NEXT: unpcklps {{.*#+}} xmm9 = xmm9[0],xmm6[0],xmm9[1],xmm6[1]
3933 ; SSE-NEXT: movaps %xmm9, %xmm8
3934 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm4[2,0]
3935 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3936 ; SSE-NEXT: movaps %xmm5, %xmm4
3937 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm1[1,1]
3938 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm4[2,0]
3939 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3940 ; SSE-NEXT: movaps %xmm3, %xmm4
3941 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm2[1,1]
3942 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm4[2,0]
3943 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3944 ; SSE-NEXT: unpckhps {{.*#+}} xmm10 = xmm10[2],xmm0[2],xmm10[3],xmm0[3]
3945 ; SSE-NEXT: movaps %xmm1, %xmm0
3946 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm5[1]
3947 ; SSE-NEXT: movaps %xmm10, %xmm4
3948 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm0[0,2]
3949 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3950 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
3951 ; SSE-NEXT: movaps %xmm2, %xmm0
3952 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
3953 ; SSE-NEXT: movaps %xmm11, %xmm4
3954 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm0[0,2]
3955 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3956 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm5[3,3]
3957 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm1[0,2]
3958 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3959 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
3960 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
3961 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3962 ; SSE-NEXT: movaps 32(%rdx), %xmm0
3963 ; SSE-NEXT: movaps 32(%rcx), %xmm1
3964 ; SSE-NEXT: movaps %xmm1, %xmm2
3965 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
3966 ; SSE-NEXT: movaps 32(%rdi), %xmm7
3967 ; SSE-NEXT: movaps 32(%rsi), %xmm4
3968 ; SSE-NEXT: movaps %xmm7, %xmm5
3969 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
3970 ; SSE-NEXT: movaps %xmm5, %xmm3
3971 ; SSE-NEXT: movaps %xmm5, %xmm8
3972 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
3973 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3974 ; SSE-NEXT: movaps 32(%r10), %xmm2
3975 ; SSE-NEXT: movaps 32(%rax), %xmm3
3976 ; SSE-NEXT: movaps %xmm3, %xmm5
3977 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
3978 ; SSE-NEXT: movaps 32(%r8), %xmm11
3979 ; SSE-NEXT: movaps 32(%r9), %xmm6
3980 ; SSE-NEXT: movaps %xmm11, %xmm10
3981 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
3982 ; SSE-NEXT: movaps %xmm10, %xmm9
3983 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
3984 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3985 ; SSE-NEXT: movaps %xmm1, %xmm5
3986 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
3987 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
3988 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3989 ; SSE-NEXT: movaps %xmm3, %xmm5
3990 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
3991 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
3992 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3993 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
3994 ; SSE-NEXT: movaps %xmm0, %xmm4
3995 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
3996 ; SSE-NEXT: movaps %xmm7, %xmm5
3997 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
3998 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3999 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4000 ; SSE-NEXT: movaps %xmm2, %xmm4
4001 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4002 ; SSE-NEXT: movaps %xmm11, %xmm5
4003 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4004 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4005 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4006 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4007 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4008 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4009 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4010 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4011 ; SSE-NEXT: movaps 48(%rdx), %xmm0
4012 ; SSE-NEXT: movaps 48(%rcx), %xmm1
4013 ; SSE-NEXT: movaps %xmm1, %xmm2
4014 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4015 ; SSE-NEXT: movaps 48(%rdi), %xmm7
4016 ; SSE-NEXT: movaps 48(%rsi), %xmm4
4017 ; SSE-NEXT: movaps %xmm7, %xmm5
4018 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4019 ; SSE-NEXT: movaps %xmm5, %xmm3
4020 ; SSE-NEXT: movaps %xmm5, %xmm8
4021 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4022 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4023 ; SSE-NEXT: movaps 48(%r10), %xmm2
4024 ; SSE-NEXT: movaps 48(%rax), %xmm3
4025 ; SSE-NEXT: movaps %xmm3, %xmm5
4026 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4027 ; SSE-NEXT: movaps 48(%r8), %xmm11
4028 ; SSE-NEXT: movaps 48(%r9), %xmm6
4029 ; SSE-NEXT: movaps %xmm11, %xmm10
4030 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4031 ; SSE-NEXT: movaps %xmm10, %xmm9
4032 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4033 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4034 ; SSE-NEXT: movaps %xmm1, %xmm5
4035 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4036 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4037 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4038 ; SSE-NEXT: movaps %xmm3, %xmm5
4039 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4040 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4041 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4042 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4043 ; SSE-NEXT: movaps %xmm0, %xmm4
4044 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4045 ; SSE-NEXT: movaps %xmm7, %xmm5
4046 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4047 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4048 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4049 ; SSE-NEXT: movaps %xmm2, %xmm4
4050 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4051 ; SSE-NEXT: movaps %xmm11, %xmm5
4052 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4053 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4054 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4055 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4056 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4057 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4058 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4059 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4060 ; SSE-NEXT: movaps 64(%rdx), %xmm0
4061 ; SSE-NEXT: movaps 64(%rcx), %xmm1
4062 ; SSE-NEXT: movaps %xmm1, %xmm2
4063 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4064 ; SSE-NEXT: movaps 64(%rdi), %xmm7
4065 ; SSE-NEXT: movaps 64(%rsi), %xmm4
4066 ; SSE-NEXT: movaps %xmm7, %xmm5
4067 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4068 ; SSE-NEXT: movaps %xmm5, %xmm3
4069 ; SSE-NEXT: movaps %xmm5, %xmm8
4070 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4071 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4072 ; SSE-NEXT: movaps 64(%r10), %xmm2
4073 ; SSE-NEXT: movaps 64(%rax), %xmm3
4074 ; SSE-NEXT: movaps %xmm3, %xmm5
4075 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4076 ; SSE-NEXT: movaps 64(%r8), %xmm11
4077 ; SSE-NEXT: movaps 64(%r9), %xmm6
4078 ; SSE-NEXT: movaps %xmm11, %xmm10
4079 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4080 ; SSE-NEXT: movaps %xmm10, %xmm9
4081 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4082 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4083 ; SSE-NEXT: movaps %xmm1, %xmm5
4084 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4085 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4086 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4087 ; SSE-NEXT: movaps %xmm3, %xmm5
4088 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4089 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4090 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4091 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4092 ; SSE-NEXT: movaps %xmm0, %xmm4
4093 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4094 ; SSE-NEXT: movaps %xmm7, %xmm5
4095 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4096 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4097 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4098 ; SSE-NEXT: movaps %xmm2, %xmm4
4099 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4100 ; SSE-NEXT: movaps %xmm11, %xmm5
4101 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4102 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4103 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4104 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4105 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4106 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4107 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4108 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4109 ; SSE-NEXT: movaps 80(%rdx), %xmm0
4110 ; SSE-NEXT: movaps 80(%rcx), %xmm1
4111 ; SSE-NEXT: movaps %xmm1, %xmm2
4112 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4113 ; SSE-NEXT: movaps 80(%rdi), %xmm7
4114 ; SSE-NEXT: movaps 80(%rsi), %xmm4
4115 ; SSE-NEXT: movaps %xmm7, %xmm5
4116 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4117 ; SSE-NEXT: movaps %xmm5, %xmm3
4118 ; SSE-NEXT: movaps %xmm5, %xmm8
4119 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4120 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4121 ; SSE-NEXT: movaps 80(%r10), %xmm2
4122 ; SSE-NEXT: movaps 80(%rax), %xmm3
4123 ; SSE-NEXT: movaps %xmm3, %xmm5
4124 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4125 ; SSE-NEXT: movaps 80(%r8), %xmm11
4126 ; SSE-NEXT: movaps 80(%r9), %xmm6
4127 ; SSE-NEXT: movaps %xmm11, %xmm10
4128 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4129 ; SSE-NEXT: movaps %xmm10, %xmm9
4130 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4131 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4132 ; SSE-NEXT: movaps %xmm1, %xmm5
4133 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4134 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4135 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4136 ; SSE-NEXT: movaps %xmm3, %xmm5
4137 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4138 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4139 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4140 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4141 ; SSE-NEXT: movaps %xmm0, %xmm4
4142 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4143 ; SSE-NEXT: movaps %xmm7, %xmm5
4144 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4145 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4146 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4147 ; SSE-NEXT: movaps %xmm2, %xmm4
4148 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4149 ; SSE-NEXT: movaps %xmm11, %xmm5
4150 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4151 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4152 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4153 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4154 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4155 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4156 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4157 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4158 ; SSE-NEXT: movaps 96(%rdx), %xmm0
4159 ; SSE-NEXT: movaps 96(%rcx), %xmm1
4160 ; SSE-NEXT: movaps %xmm1, %xmm2
4161 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4162 ; SSE-NEXT: movaps 96(%rdi), %xmm7
4163 ; SSE-NEXT: movaps 96(%rsi), %xmm4
4164 ; SSE-NEXT: movaps %xmm7, %xmm5
4165 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4166 ; SSE-NEXT: movaps %xmm5, %xmm3
4167 ; SSE-NEXT: movaps %xmm5, %xmm8
4168 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4169 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4170 ; SSE-NEXT: movaps 96(%r10), %xmm2
4171 ; SSE-NEXT: movaps 96(%rax), %xmm3
4172 ; SSE-NEXT: movaps %xmm3, %xmm5
4173 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4174 ; SSE-NEXT: movaps 96(%r8), %xmm11
4175 ; SSE-NEXT: movaps 96(%r9), %xmm6
4176 ; SSE-NEXT: movaps %xmm11, %xmm10
4177 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4178 ; SSE-NEXT: movaps %xmm10, %xmm9
4179 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4180 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4181 ; SSE-NEXT: movaps %xmm1, %xmm5
4182 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4183 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4184 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4185 ; SSE-NEXT: movaps %xmm3, %xmm5
4186 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4187 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4188 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4189 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4190 ; SSE-NEXT: movaps %xmm0, %xmm4
4191 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4192 ; SSE-NEXT: movaps %xmm7, %xmm5
4193 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4194 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4195 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4196 ; SSE-NEXT: movaps %xmm2, %xmm4
4197 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4198 ; SSE-NEXT: movaps %xmm11, %xmm5
4199 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4200 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4201 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4202 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4203 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4204 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4205 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4206 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4207 ; SSE-NEXT: movaps 112(%rdx), %xmm0
4208 ; SSE-NEXT: movaps 112(%rcx), %xmm1
4209 ; SSE-NEXT: movaps %xmm1, %xmm2
4210 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4211 ; SSE-NEXT: movaps 112(%rdi), %xmm7
4212 ; SSE-NEXT: movaps 112(%rsi), %xmm4
4213 ; SSE-NEXT: movaps %xmm7, %xmm5
4214 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4215 ; SSE-NEXT: movaps %xmm5, %xmm3
4216 ; SSE-NEXT: movaps %xmm5, %xmm8
4217 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4218 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4219 ; SSE-NEXT: movaps 112(%r10), %xmm2
4220 ; SSE-NEXT: movaps 112(%rax), %xmm3
4221 ; SSE-NEXT: movaps %xmm3, %xmm5
4222 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4223 ; SSE-NEXT: movaps 112(%r8), %xmm11
4224 ; SSE-NEXT: movaps 112(%r9), %xmm6
4225 ; SSE-NEXT: movaps %xmm11, %xmm10
4226 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4227 ; SSE-NEXT: movaps %xmm10, %xmm9
4228 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4229 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4230 ; SSE-NEXT: movaps %xmm1, %xmm5
4231 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4232 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4233 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4234 ; SSE-NEXT: movaps %xmm3, %xmm5
4235 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4236 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4237 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4238 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4239 ; SSE-NEXT: movaps %xmm0, %xmm4
4240 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4241 ; SSE-NEXT: movaps %xmm7, %xmm5
4242 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4243 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4244 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4245 ; SSE-NEXT: movaps %xmm2, %xmm4
4246 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4247 ; SSE-NEXT: movaps %xmm11, %xmm5
4248 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4249 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4250 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4251 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4252 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4253 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4254 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4255 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4256 ; SSE-NEXT: movaps 128(%rdx), %xmm0
4257 ; SSE-NEXT: movaps 128(%rcx), %xmm1
4258 ; SSE-NEXT: movaps %xmm1, %xmm2
4259 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4260 ; SSE-NEXT: movaps 128(%rdi), %xmm7
4261 ; SSE-NEXT: movaps 128(%rsi), %xmm4
4262 ; SSE-NEXT: movaps %xmm7, %xmm5
4263 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4264 ; SSE-NEXT: movaps %xmm5, %xmm3
4265 ; SSE-NEXT: movaps %xmm5, %xmm8
4266 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4267 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4268 ; SSE-NEXT: movaps 128(%r10), %xmm2
4269 ; SSE-NEXT: movaps 128(%rax), %xmm3
4270 ; SSE-NEXT: movaps %xmm3, %xmm5
4271 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4272 ; SSE-NEXT: movaps 128(%r8), %xmm11
4273 ; SSE-NEXT: movaps 128(%r9), %xmm6
4274 ; SSE-NEXT: movaps %xmm11, %xmm10
4275 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4276 ; SSE-NEXT: movaps %xmm10, %xmm9
4277 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4278 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4279 ; SSE-NEXT: movaps %xmm1, %xmm5
4280 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4281 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4282 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4283 ; SSE-NEXT: movaps %xmm3, %xmm5
4284 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4285 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4286 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4287 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4288 ; SSE-NEXT: movaps %xmm0, %xmm4
4289 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4290 ; SSE-NEXT: movaps %xmm7, %xmm5
4291 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4292 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4293 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4294 ; SSE-NEXT: movaps %xmm2, %xmm4
4295 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4296 ; SSE-NEXT: movaps %xmm11, %xmm5
4297 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4298 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4299 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4300 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4301 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4302 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4303 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4304 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4305 ; SSE-NEXT: movaps 144(%rdx), %xmm0
4306 ; SSE-NEXT: movaps 144(%rcx), %xmm1
4307 ; SSE-NEXT: movaps %xmm1, %xmm2
4308 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4309 ; SSE-NEXT: movaps 144(%rdi), %xmm7
4310 ; SSE-NEXT: movaps 144(%rsi), %xmm4
4311 ; SSE-NEXT: movaps %xmm7, %xmm5
4312 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4313 ; SSE-NEXT: movaps %xmm5, %xmm3
4314 ; SSE-NEXT: movaps %xmm5, %xmm8
4315 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4316 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4317 ; SSE-NEXT: movaps 144(%r10), %xmm2
4318 ; SSE-NEXT: movaps 144(%rax), %xmm3
4319 ; SSE-NEXT: movaps %xmm3, %xmm5
4320 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4321 ; SSE-NEXT: movaps 144(%r8), %xmm11
4322 ; SSE-NEXT: movaps 144(%r9), %xmm6
4323 ; SSE-NEXT: movaps %xmm11, %xmm10
4324 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4325 ; SSE-NEXT: movaps %xmm10, %xmm9
4326 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4327 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4328 ; SSE-NEXT: movaps %xmm1, %xmm5
4329 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4330 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4331 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4332 ; SSE-NEXT: movaps %xmm3, %xmm5
4333 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4334 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4335 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4336 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4337 ; SSE-NEXT: movaps %xmm0, %xmm4
4338 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4339 ; SSE-NEXT: movaps %xmm7, %xmm5
4340 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4341 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4342 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4343 ; SSE-NEXT: movaps %xmm2, %xmm4
4344 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4345 ; SSE-NEXT: movaps %xmm11, %xmm5
4346 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4347 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4348 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4349 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4350 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4351 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4352 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4353 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4354 ; SSE-NEXT: movaps 160(%rdx), %xmm0
4355 ; SSE-NEXT: movaps 160(%rcx), %xmm1
4356 ; SSE-NEXT: movaps %xmm1, %xmm2
4357 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4358 ; SSE-NEXT: movaps 160(%rdi), %xmm7
4359 ; SSE-NEXT: movaps 160(%rsi), %xmm4
4360 ; SSE-NEXT: movaps %xmm7, %xmm5
4361 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4362 ; SSE-NEXT: movaps %xmm5, %xmm3
4363 ; SSE-NEXT: movaps %xmm5, %xmm8
4364 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4365 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4366 ; SSE-NEXT: movaps 160(%r10), %xmm2
4367 ; SSE-NEXT: movaps 160(%rax), %xmm3
4368 ; SSE-NEXT: movaps %xmm3, %xmm5
4369 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4370 ; SSE-NEXT: movaps 160(%r8), %xmm11
4371 ; SSE-NEXT: movaps 160(%r9), %xmm6
4372 ; SSE-NEXT: movaps %xmm11, %xmm10
4373 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4374 ; SSE-NEXT: movaps %xmm10, %xmm9
4375 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4376 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4377 ; SSE-NEXT: movaps %xmm1, %xmm5
4378 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4379 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4380 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4381 ; SSE-NEXT: movaps %xmm3, %xmm5
4382 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4383 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4384 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4385 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4386 ; SSE-NEXT: movaps %xmm0, %xmm4
4387 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4388 ; SSE-NEXT: movaps %xmm7, %xmm5
4389 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4390 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4391 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4392 ; SSE-NEXT: movaps %xmm2, %xmm4
4393 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4394 ; SSE-NEXT: movaps %xmm11, %xmm5
4395 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4396 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4397 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4398 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4399 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4400 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4401 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4402 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4403 ; SSE-NEXT: movaps 176(%rdx), %xmm0
4404 ; SSE-NEXT: movaps 176(%rcx), %xmm1
4405 ; SSE-NEXT: movaps %xmm1, %xmm2
4406 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4407 ; SSE-NEXT: movaps 176(%rdi), %xmm7
4408 ; SSE-NEXT: movaps 176(%rsi), %xmm4
4409 ; SSE-NEXT: movaps %xmm7, %xmm5
4410 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4411 ; SSE-NEXT: movaps %xmm5, %xmm3
4412 ; SSE-NEXT: movaps %xmm5, %xmm8
4413 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4414 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4415 ; SSE-NEXT: movaps 176(%r10), %xmm2
4416 ; SSE-NEXT: movaps 176(%rax), %xmm3
4417 ; SSE-NEXT: movaps %xmm3, %xmm5
4418 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4419 ; SSE-NEXT: movaps 176(%r8), %xmm11
4420 ; SSE-NEXT: movaps 176(%r9), %xmm6
4421 ; SSE-NEXT: movaps %xmm11, %xmm10
4422 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4423 ; SSE-NEXT: movaps %xmm10, %xmm9
4424 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4425 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4426 ; SSE-NEXT: movaps %xmm1, %xmm5
4427 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4428 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4429 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4430 ; SSE-NEXT: movaps %xmm3, %xmm5
4431 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4432 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4433 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4434 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4435 ; SSE-NEXT: movaps %xmm0, %xmm4
4436 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4437 ; SSE-NEXT: movaps %xmm7, %xmm5
4438 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4439 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4440 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4441 ; SSE-NEXT: movaps %xmm2, %xmm4
4442 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4443 ; SSE-NEXT: movaps %xmm11, %xmm5
4444 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4445 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4446 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4447 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4448 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4449 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4450 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4451 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4452 ; SSE-NEXT: movaps 192(%rdx), %xmm0
4453 ; SSE-NEXT: movaps 192(%rcx), %xmm1
4454 ; SSE-NEXT: movaps %xmm1, %xmm2
4455 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4456 ; SSE-NEXT: movaps 192(%rdi), %xmm7
4457 ; SSE-NEXT: movaps 192(%rsi), %xmm4
4458 ; SSE-NEXT: movaps %xmm7, %xmm5
4459 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4460 ; SSE-NEXT: movaps %xmm5, %xmm3
4461 ; SSE-NEXT: movaps %xmm5, %xmm8
4462 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4463 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4464 ; SSE-NEXT: movaps 192(%r10), %xmm2
4465 ; SSE-NEXT: movaps 192(%rax), %xmm3
4466 ; SSE-NEXT: movaps %xmm3, %xmm5
4467 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4468 ; SSE-NEXT: movaps 192(%r8), %xmm11
4469 ; SSE-NEXT: movaps 192(%r9), %xmm6
4470 ; SSE-NEXT: movaps %xmm11, %xmm10
4471 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4472 ; SSE-NEXT: movaps %xmm10, %xmm9
4473 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4474 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4475 ; SSE-NEXT: movaps %xmm1, %xmm5
4476 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4477 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4478 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4479 ; SSE-NEXT: movaps %xmm3, %xmm5
4480 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4481 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4482 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4483 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4484 ; SSE-NEXT: movaps %xmm0, %xmm4
4485 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4486 ; SSE-NEXT: movaps %xmm7, %xmm5
4487 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4488 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4489 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4490 ; SSE-NEXT: movaps %xmm2, %xmm4
4491 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4492 ; SSE-NEXT: movaps %xmm11, %xmm5
4493 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4494 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4495 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4496 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4497 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4498 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4499 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4500 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4501 ; SSE-NEXT: movaps 208(%rdx), %xmm0
4502 ; SSE-NEXT: movaps 208(%rcx), %xmm1
4503 ; SSE-NEXT: movaps %xmm1, %xmm2
4504 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
4505 ; SSE-NEXT: movaps 208(%rdi), %xmm7
4506 ; SSE-NEXT: movaps 208(%rsi), %xmm4
4507 ; SSE-NEXT: movaps %xmm7, %xmm5
4508 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4509 ; SSE-NEXT: movaps %xmm5, %xmm3
4510 ; SSE-NEXT: movaps %xmm5, %xmm8
4511 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4512 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4513 ; SSE-NEXT: movaps 208(%r10), %xmm2
4514 ; SSE-NEXT: movaps 208(%rax), %xmm3
4515 ; SSE-NEXT: movaps %xmm3, %xmm5
4516 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4517 ; SSE-NEXT: movaps 208(%r8), %xmm11
4518 ; SSE-NEXT: movaps 208(%r9), %xmm6
4519 ; SSE-NEXT: movaps %xmm11, %xmm10
4520 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0],xmm6[0],xmm10[1],xmm6[1]
4521 ; SSE-NEXT: movaps %xmm10, %xmm9
4522 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm5[2,0]
4523 ; SSE-NEXT: movaps %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4524 ; SSE-NEXT: movaps %xmm1, %xmm5
4525 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4526 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm5[2,0]
4527 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4528 ; SSE-NEXT: movaps %xmm3, %xmm5
4529 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4530 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm5[2,0]
4531 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4532 ; SSE-NEXT: unpckhps {{.*#+}} xmm7 = xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4533 ; SSE-NEXT: movaps %xmm0, %xmm4
4534 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm1[1]
4535 ; SSE-NEXT: movaps %xmm7, %xmm5
4536 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4537 ; SSE-NEXT: movaps %xmm5, (%rsp) # 16-byte Spill
4538 ; SSE-NEXT: unpckhps {{.*#+}} xmm11 = xmm11[2],xmm6[2],xmm11[3],xmm6[3]
4539 ; SSE-NEXT: movaps %xmm2, %xmm4
4540 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4541 ; SSE-NEXT: movaps %xmm11, %xmm5
4542 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4543 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4544 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
4545 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[0,2]
4546 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4547 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4548 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm2[0,2]
4549 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4550 ; SSE-NEXT: movaps 224(%rdx), %xmm1
4551 ; SSE-NEXT: movaps 224(%rcx), %xmm0
4552 ; SSE-NEXT: movaps %xmm0, %xmm2
4553 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
4554 ; SSE-NEXT: movaps 224(%rdi), %xmm12
4555 ; SSE-NEXT: movaps 224(%rsi), %xmm4
4556 ; SSE-NEXT: movaps %xmm12, %xmm5
4557 ; SSE-NEXT: unpcklps {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4558 ; SSE-NEXT: movaps %xmm5, %xmm3
4559 ; SSE-NEXT: movaps %xmm5, %xmm7
4560 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
4561 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4562 ; SSE-NEXT: movaps 224(%r10), %xmm2
4563 ; SSE-NEXT: movaps 224(%rax), %xmm3
4564 ; SSE-NEXT: movaps %xmm3, %xmm5
4565 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
4566 ; SSE-NEXT: movaps 224(%r8), %xmm15
4567 ; SSE-NEXT: movaps 224(%r9), %xmm6
4568 ; SSE-NEXT: movaps %xmm15, %xmm13
4569 ; SSE-NEXT: unpcklps {{.*#+}} xmm13 = xmm13[0],xmm6[0],xmm13[1],xmm6[1]
4570 ; SSE-NEXT: movaps %xmm13, %xmm8
4571 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm5[2,0]
4572 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4573 ; SSE-NEXT: movaps %xmm0, %xmm5
4574 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm1[1,1]
4575 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm5[2,0]
4576 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4577 ; SSE-NEXT: movaps %xmm3, %xmm5
4578 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
4579 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm5[2,0]
4580 ; SSE-NEXT: unpckhps {{.*#+}} xmm12 = xmm12[2],xmm4[2],xmm12[3],xmm4[3]
4581 ; SSE-NEXT: movaps %xmm1, %xmm4
4582 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
4583 ; SSE-NEXT: movaps %xmm12, %xmm5
4584 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4585 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4586 ; SSE-NEXT: unpckhps {{.*#+}} xmm15 = xmm15[2],xmm6[2],xmm15[3],xmm6[3]
4587 ; SSE-NEXT: movaps %xmm2, %xmm4
4588 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm3[1]
4589 ; SSE-NEXT: movaps %xmm15, %xmm5
4590 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[0,2]
4591 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4592 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3],xmm0[3,3]
4593 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,3],xmm1[0,2]
4594 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
4595 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,3],xmm2[0,2]
4596 ; SSE-NEXT: movaps 240(%rdx), %xmm2
4597 ; SSE-NEXT: movaps 240(%rcx), %xmm8
4598 ; SSE-NEXT: movaps %xmm8, %xmm0
4599 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4600 ; SSE-NEXT: movaps 240(%rdi), %xmm1
4601 ; SSE-NEXT: movaps 240(%rsi), %xmm11
4602 ; SSE-NEXT: movaps %xmm1, %xmm4
4603 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0],xmm11[0],xmm4[1],xmm11[1]
4604 ; SSE-NEXT: movaps %xmm4, %xmm14
4605 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm0[2,0]
4606 ; SSE-NEXT: movaps 240(%r10), %xmm0
4607 ; SSE-NEXT: movaps 240(%rax), %xmm7
4608 ; SSE-NEXT: movaps %xmm7, %xmm3
4609 ; SSE-NEXT: movlhps {{.*#+}} xmm3 = xmm3[0],xmm0[0]
4610 ; SSE-NEXT: movaps 240(%r8), %xmm5
4611 ; SSE-NEXT: movaps 240(%r9), %xmm9
4612 ; SSE-NEXT: movaps %xmm5, %xmm6
4613 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1]
4614 ; SSE-NEXT: movaps %xmm6, %xmm10
4615 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm3[2,0]
4616 ; SSE-NEXT: movaps %xmm8, %xmm3
4617 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm2[1,1]
4618 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,3],xmm3[2,0]
4619 ; SSE-NEXT: movaps %xmm7, %xmm3
4620 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm0[1,1]
4621 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,3],xmm3[2,0]
4622 ; SSE-NEXT: unpckhps {{.*#+}} xmm1 = xmm1[2],xmm11[2],xmm1[3],xmm11[3]
4623 ; SSE-NEXT: movaps %xmm2, %xmm3
4624 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm8[1]
4625 ; SSE-NEXT: movaps %xmm1, %xmm11
4626 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm3[0,2]
4627 ; SSE-NEXT: unpckhps {{.*#+}} xmm5 = xmm5[2],xmm9[2],xmm5[3],xmm9[3]
4628 ; SSE-NEXT: movaps %xmm0, %xmm3
4629 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm7[1]
4630 ; SSE-NEXT: movaps %xmm5, %xmm9
4631 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm3[0,2]
4632 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,3],xmm8[3,3]
4633 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[0,2]
4634 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3],xmm7[3,3]
4635 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,3],xmm0[0,2]
4636 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4637 ; SSE-NEXT: movaps %xmm5, 2032(%rax)
4638 ; SSE-NEXT: movaps %xmm1, 2016(%rax)
4639 ; SSE-NEXT: movaps %xmm9, 2000(%rax)
4640 ; SSE-NEXT: movaps %xmm11, 1984(%rax)
4641 ; SSE-NEXT: movaps %xmm6, 1968(%rax)
4642 ; SSE-NEXT: movaps %xmm4, 1952(%rax)
4643 ; SSE-NEXT: movaps %xmm10, 1936(%rax)
4644 ; SSE-NEXT: movaps %xmm14, 1920(%rax)
4645 ; SSE-NEXT: movaps %xmm15, 1904(%rax)
4646 ; SSE-NEXT: movaps %xmm12, 1888(%rax)
4647 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4648 ; SSE-NEXT: movaps %xmm0, 1872(%rax)
4649 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4650 ; SSE-NEXT: movaps %xmm0, 1856(%rax)
4651 ; SSE-NEXT: movaps %xmm13, 1840(%rax)
4652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4653 ; SSE-NEXT: movaps %xmm0, 1824(%rax)
4654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4655 ; SSE-NEXT: movaps %xmm0, 1808(%rax)
4656 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4657 ; SSE-NEXT: movaps %xmm0, 1792(%rax)
4658 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4659 ; SSE-NEXT: movaps %xmm0, 1776(%rax)
4660 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4661 ; SSE-NEXT: movaps %xmm0, 1760(%rax)
4662 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4663 ; SSE-NEXT: movaps %xmm0, 1744(%rax)
4664 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4665 ; SSE-NEXT: movaps %xmm0, 1728(%rax)
4666 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4667 ; SSE-NEXT: movaps %xmm0, 1712(%rax)
4668 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4669 ; SSE-NEXT: movaps %xmm0, 1696(%rax)
4670 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4671 ; SSE-NEXT: movaps %xmm0, 1680(%rax)
4672 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4673 ; SSE-NEXT: movaps %xmm0, 1664(%rax)
4674 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4675 ; SSE-NEXT: movaps %xmm0, 1648(%rax)
4676 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4677 ; SSE-NEXT: movaps %xmm0, 1632(%rax)
4678 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4679 ; SSE-NEXT: movaps %xmm0, 1616(%rax)
4680 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4681 ; SSE-NEXT: movaps %xmm0, 1600(%rax)
4682 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4683 ; SSE-NEXT: movaps %xmm0, 1584(%rax)
4684 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4685 ; SSE-NEXT: movaps %xmm0, 1568(%rax)
4686 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4687 ; SSE-NEXT: movaps %xmm0, 1552(%rax)
4688 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4689 ; SSE-NEXT: movaps %xmm0, 1536(%rax)
4690 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4691 ; SSE-NEXT: movaps %xmm0, 1520(%rax)
4692 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4693 ; SSE-NEXT: movaps %xmm0, 1504(%rax)
4694 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4695 ; SSE-NEXT: movaps %xmm0, 1488(%rax)
4696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4697 ; SSE-NEXT: movaps %xmm0, 1472(%rax)
4698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4699 ; SSE-NEXT: movaps %xmm0, 1456(%rax)
4700 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4701 ; SSE-NEXT: movaps %xmm0, 1440(%rax)
4702 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4703 ; SSE-NEXT: movaps %xmm0, 1424(%rax)
4704 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4705 ; SSE-NEXT: movaps %xmm0, 1408(%rax)
4706 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4707 ; SSE-NEXT: movaps %xmm0, 1392(%rax)
4708 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4709 ; SSE-NEXT: movaps %xmm0, 1376(%rax)
4710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4711 ; SSE-NEXT: movaps %xmm0, 1360(%rax)
4712 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4713 ; SSE-NEXT: movaps %xmm0, 1344(%rax)
4714 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4715 ; SSE-NEXT: movaps %xmm0, 1328(%rax)
4716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4717 ; SSE-NEXT: movaps %xmm0, 1312(%rax)
4718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4719 ; SSE-NEXT: movaps %xmm0, 1296(%rax)
4720 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4721 ; SSE-NEXT: movaps %xmm0, 1280(%rax)
4722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4723 ; SSE-NEXT: movaps %xmm0, 1264(%rax)
4724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4725 ; SSE-NEXT: movaps %xmm0, 1248(%rax)
4726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4727 ; SSE-NEXT: movaps %xmm0, 1232(%rax)
4728 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4729 ; SSE-NEXT: movaps %xmm0, 1216(%rax)
4730 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4731 ; SSE-NEXT: movaps %xmm0, 1200(%rax)
4732 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4733 ; SSE-NEXT: movaps %xmm0, 1184(%rax)
4734 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4735 ; SSE-NEXT: movaps %xmm0, 1168(%rax)
4736 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4737 ; SSE-NEXT: movaps %xmm0, 1152(%rax)
4738 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4739 ; SSE-NEXT: movaps %xmm0, 1136(%rax)
4740 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4741 ; SSE-NEXT: movaps %xmm0, 1120(%rax)
4742 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4743 ; SSE-NEXT: movaps %xmm0, 1104(%rax)
4744 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4745 ; SSE-NEXT: movaps %xmm0, 1088(%rax)
4746 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4747 ; SSE-NEXT: movaps %xmm0, 1072(%rax)
4748 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4749 ; SSE-NEXT: movaps %xmm0, 1056(%rax)
4750 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4751 ; SSE-NEXT: movaps %xmm0, 1040(%rax)
4752 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4753 ; SSE-NEXT: movaps %xmm0, 1024(%rax)
4754 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4755 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
4756 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4757 ; SSE-NEXT: movaps %xmm0, 992(%rax)
4758 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4759 ; SSE-NEXT: movaps %xmm0, 976(%rax)
4760 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4761 ; SSE-NEXT: movaps %xmm0, 960(%rax)
4762 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4763 ; SSE-NEXT: movaps %xmm0, 944(%rax)
4764 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4765 ; SSE-NEXT: movaps %xmm0, 928(%rax)
4766 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4767 ; SSE-NEXT: movaps %xmm0, 912(%rax)
4768 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4769 ; SSE-NEXT: movaps %xmm0, 896(%rax)
4770 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4771 ; SSE-NEXT: movaps %xmm0, 880(%rax)
4772 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4773 ; SSE-NEXT: movaps %xmm0, 864(%rax)
4774 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4775 ; SSE-NEXT: movaps %xmm0, 848(%rax)
4776 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4777 ; SSE-NEXT: movaps %xmm0, 832(%rax)
4778 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4779 ; SSE-NEXT: movaps %xmm0, 816(%rax)
4780 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4781 ; SSE-NEXT: movaps %xmm0, 800(%rax)
4782 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4783 ; SSE-NEXT: movaps %xmm0, 784(%rax)
4784 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4785 ; SSE-NEXT: movaps %xmm0, 768(%rax)
4786 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4787 ; SSE-NEXT: movaps %xmm0, 752(%rax)
4788 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4789 ; SSE-NEXT: movaps %xmm0, 736(%rax)
4790 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4791 ; SSE-NEXT: movaps %xmm0, 720(%rax)
4792 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4793 ; SSE-NEXT: movaps %xmm0, 704(%rax)
4794 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4795 ; SSE-NEXT: movaps %xmm0, 688(%rax)
4796 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4797 ; SSE-NEXT: movaps %xmm0, 672(%rax)
4798 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4799 ; SSE-NEXT: movaps %xmm0, 656(%rax)
4800 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4801 ; SSE-NEXT: movaps %xmm0, 640(%rax)
4802 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4803 ; SSE-NEXT: movaps %xmm0, 624(%rax)
4804 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4805 ; SSE-NEXT: movaps %xmm0, 608(%rax)
4806 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4807 ; SSE-NEXT: movaps %xmm0, 592(%rax)
4808 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4809 ; SSE-NEXT: movaps %xmm0, 576(%rax)
4810 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4811 ; SSE-NEXT: movaps %xmm0, 560(%rax)
4812 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4813 ; SSE-NEXT: movaps %xmm0, 544(%rax)
4814 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4815 ; SSE-NEXT: movaps %xmm0, 528(%rax)
4816 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4817 ; SSE-NEXT: movaps %xmm0, 512(%rax)
4818 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4819 ; SSE-NEXT: movaps %xmm0, 496(%rax)
4820 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4821 ; SSE-NEXT: movaps %xmm0, 480(%rax)
4822 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4823 ; SSE-NEXT: movaps %xmm0, 464(%rax)
4824 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4825 ; SSE-NEXT: movaps %xmm0, 448(%rax)
4826 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4827 ; SSE-NEXT: movaps %xmm0, 432(%rax)
4828 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4829 ; SSE-NEXT: movaps %xmm0, 416(%rax)
4830 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4831 ; SSE-NEXT: movaps %xmm0, 400(%rax)
4832 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4833 ; SSE-NEXT: movaps %xmm0, 384(%rax)
4834 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4835 ; SSE-NEXT: movaps %xmm0, 368(%rax)
4836 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4837 ; SSE-NEXT: movaps %xmm0, 352(%rax)
4838 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4839 ; SSE-NEXT: movaps %xmm0, 336(%rax)
4840 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4841 ; SSE-NEXT: movaps %xmm0, 320(%rax)
4842 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4843 ; SSE-NEXT: movaps %xmm0, 304(%rax)
4844 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4845 ; SSE-NEXT: movaps %xmm0, 288(%rax)
4846 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4847 ; SSE-NEXT: movaps %xmm0, 272(%rax)
4848 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4849 ; SSE-NEXT: movaps %xmm0, 256(%rax)
4850 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4851 ; SSE-NEXT: movaps %xmm0, 240(%rax)
4852 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4853 ; SSE-NEXT: movaps %xmm0, 224(%rax)
4854 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4855 ; SSE-NEXT: movaps %xmm0, 208(%rax)
4856 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4857 ; SSE-NEXT: movaps %xmm0, 192(%rax)
4858 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4859 ; SSE-NEXT: movaps %xmm0, 176(%rax)
4860 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4861 ; SSE-NEXT: movaps %xmm0, 160(%rax)
4862 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4863 ; SSE-NEXT: movaps %xmm0, 144(%rax)
4864 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4865 ; SSE-NEXT: movaps %xmm0, 128(%rax)
4866 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4867 ; SSE-NEXT: movaps %xmm0, 112(%rax)
4868 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4869 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4870 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4871 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4872 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4873 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4874 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4875 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4876 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4877 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4878 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4879 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4880 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4881 ; SSE-NEXT: movaps %xmm0, (%rax)
4882 ; SSE-NEXT: addq $1752, %rsp # imm = 0x6D8
4885 ; AVX1-ONLY-LABEL: store_i32_stride8_vf64:
4886 ; AVX1-ONLY: # %bb.0:
4887 ; AVX1-ONLY-NEXT: subq $1672, %rsp # imm = 0x688
4888 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4889 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
4890 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %ymm3
4891 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
4892 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %ymm4
4893 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
4894 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
4895 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %ymm5
4896 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %ymm6
4897 ; AVX1-ONLY-NEXT: vmovaps (%r8), %ymm7
4898 ; AVX1-ONLY-NEXT: vmovaps (%r9), %ymm9
4899 ; AVX1-ONLY-NEXT: vmovaps (%r10), %ymm11
4900 ; AVX1-ONLY-NEXT: vmovaps (%rax), %ymm12
4901 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm8 = ymm12[0],ymm11[0],ymm12[2],ymm11[2]
4902 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[4],ymm9[4],ymm7[5],ymm9[5]
4903 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm10[0,1],ymm8[2,0],ymm10[4,5],ymm8[6,4]
4904 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
4905 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
4906 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm13 = ymm6[0],ymm5[0],ymm6[2],ymm5[2]
4907 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
4908 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
4909 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0,1],xmm13[2,3]
4910 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
4911 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4912 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[4],ymm12[4],ymm11[5],ymm12[5]
4913 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm9[1,0],ymm7[1,0],ymm9[5,4],ymm7[5,4]
4914 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm10[2,0],ymm8[2,3],ymm10[6,4],ymm8[6,7]
4915 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[4],ymm6[4],ymm5[5],ymm6[5]
4916 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
4917 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
4918 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[2,0,2,3,6,4,6,7]
4919 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
4920 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm13[0,1],xmm10[2,3]
4921 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm10[0,1,2,3],ymm8[4,5,6,7]
4922 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4923 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm8 = ymm12[1],ymm11[1],ymm12[3],ymm11[3]
4924 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[6],ymm9[6],ymm7[7],ymm9[7]
4925 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1],ymm8[2,0],ymm10[4,5],ymm8[6,4]
4926 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
4927 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
4928 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm13 = ymm6[1],ymm5[1],ymm6[3],ymm5[3]
4929 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm13 = ymm13[0,1,2,0,4,5,6,4]
4930 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
4931 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm8[0,1],xmm13[2,3]
4932 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %ymm8
4933 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1,2,3],ymm10[4,5,6,7]
4934 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4935 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %ymm10
4936 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm12 = ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[6],ymm12[6],ymm11[7],ymm12[7]
4937 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %ymm11
4938 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm9 = ymm9[3,0],ymm7[3,0],ymm9[7,4],ymm7[7,4]
4939 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %ymm7
4940 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm12 = ymm9[2,0],ymm12[2,3],ymm9[6,4],ymm12[6,7]
4941 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %ymm9
4942 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[6],ymm6[6],ymm5[7],ymm6[7]
4943 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
4944 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
4945 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,0,2,3,6,4,6,7]
4946 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
4947 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm5[2,3]
4948 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm12[4,5,6,7]
4949 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4950 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm9[0],ymm7[0],ymm9[2],ymm7[2]
4951 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[4],ymm11[4],ymm10[5],ymm11[5]
4952 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
4953 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
4954 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
4955 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm5 = ymm8[0],ymm2[0],ymm8[2],ymm2[2]
4956 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,1,2,0,4,5,6,4]
4957 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
4958 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3]
4959 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
4960 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4961 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[4],ymm9[4],ymm7[5],ymm9[5]
4962 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm11[1,0],ymm10[1,0],ymm11[5,4],ymm10[5,4]
4963 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[2,0],ymm3[2,3],ymm4[6,4],ymm3[6,7]
4964 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm2[0],ymm8[0],ymm2[1],ymm8[1],ymm2[4],ymm8[4],ymm2[5],ymm8[5]
4965 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
4966 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
4967 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[2,0,2,3,6,4,6,7]
4968 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
4969 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
4970 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
4971 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4972 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm9[1],ymm7[1],ymm9[3],ymm7[3]
4973 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[6],ymm11[6],ymm10[7],ymm11[7]
4974 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
4975 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
4976 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
4977 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm8[1],ymm2[1],ymm8[3],ymm2[3]
4978 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,1,2,0,4,5,6,4]
4979 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
4980 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3]
4981 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
4982 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4983 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %ymm3
4984 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[6],ymm9[6],ymm7[7],ymm9[7]
4985 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %ymm4
4986 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm11[3,0],ymm10[3,0],ymm11[7,4],ymm10[7,4]
4987 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %ymm5
4988 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm7[2,0],ymm6[2,3],ymm7[6,4],ymm6[6,7]
4989 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %ymm6
4990 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm2[2],ymm8[2],ymm2[3],ymm8[3],ymm2[6],ymm8[6],ymm2[7],ymm8[7]
4991 ; AVX1-ONLY-NEXT: vmovaps 64(%r10), %ymm2
4992 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
4993 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %ymm0
4994 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
4995 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[2,0,2,3,6,4,6,7]
4996 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
4997 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0,1],xmm8[2,3]
4998 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
4999 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5000 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm1 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
5001 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[4],ymm6[4],ymm5[5],ymm6[5]
5002 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm7[0,1],ymm1[2,0],ymm7[4,5],ymm1[6,4]
5003 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
5004 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm8
5005 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %ymm1
5006 ; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %ymm9
5007 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm1[0],ymm9[2],ymm1[2]
5008 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5009 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5010 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5011 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5012 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5013 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[4],ymm0[4],ymm2[5],ymm0[5]
5014 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm6[1,0],ymm5[1,0],ymm6[5,4],ymm5[5,4]
5015 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5016 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm1[0],ymm9[0],ymm1[1],ymm9[1],ymm1[4],ymm9[4],ymm1[5],ymm9[5]
5017 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5018 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
5019 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5020 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5021 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5022 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5023 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5024 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm0[1],ymm2[1],ymm0[3],ymm2[3]
5025 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[6],ymm6[6],ymm5[7],ymm6[7]
5026 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5027 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
5028 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5029 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm1[1],ymm9[3],ymm1[3]
5030 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5031 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5032 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5033 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5034 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5035 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[6],ymm0[6],ymm2[7],ymm0[7]
5036 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm6[3,0],ymm5[3,0],ymm6[7,4],ymm5[7,4]
5037 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm2[2,0],ymm0[2,3],ymm2[6,4],ymm0[6,7]
5038 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm1[2],ymm9[2],ymm1[3],ymm9[3],ymm1[6],ymm9[6],ymm1[7],ymm9[7]
5039 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %ymm0
5040 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
5041 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %ymm1
5042 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5043 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,0,2,3,6,4,6,7]
5044 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
5045 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
5046 ; AVX1-ONLY-NEXT: vmovaps 96(%r10), %ymm2
5047 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm5[4,5,6,7]
5048 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5049 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %ymm5
5050 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm3 = ymm5[0],ymm2[0],ymm5[2],ymm2[2]
5051 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5052 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm3[2,0],ymm4[4,5],ymm3[6,4]
5053 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %ymm3
5054 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %ymm4
5055 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
5056 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
5057 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %ymm6
5058 ; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %ymm9
5059 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
5060 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5061 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5062 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5063 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5064 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5065 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm2[0],ymm5[0],ymm2[1],ymm5[1],ymm2[4],ymm5[4],ymm2[5],ymm5[5]
5066 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
5067 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5068 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
5069 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5070 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm3[1,0],ymm4[5,4],ymm3[5,4]
5071 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5072 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5073 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5074 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5075 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5076 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
5077 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5078 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5079 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
5080 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5081 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
5082 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5083 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5084 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5085 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5086 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5087 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm5[2],ymm2[3],ymm5[3],ymm2[6],ymm5[6],ymm2[7],ymm5[7]
5088 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
5089 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm2[2,3],ymm0[6,4],ymm2[6,7]
5090 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
5091 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm3[3,0],ymm4[7,4],ymm3[7,4]
5092 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5093 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
5094 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5095 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
5096 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5097 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5098 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %ymm0
5099 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %ymm1
5100 ; AVX1-ONLY-NEXT: vmovaps 128(%r10), %ymm3
5101 ; AVX1-ONLY-NEXT: vmovaps 128(%rax), %ymm5
5102 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5103 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5104 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm2[2,0],ymm4[4,5],ymm2[6,4]
5105 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %ymm2
5106 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %ymm4
5107 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
5108 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
5109 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %ymm6
5110 ; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %ymm9
5111 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
5112 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5113 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5114 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5115 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5116 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5117 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
5118 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
5119 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5120 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
5121 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5122 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm2[1,0],ymm4[5,4],ymm2[5,4]
5123 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5124 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5125 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5126 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5127 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5128 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm3[1],ymm5[3],ymm3[3]
5129 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5130 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5131 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
5132 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5133 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
5134 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5135 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5136 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5137 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5138 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5139 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
5140 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
5141 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm3[2,3],ymm0[6,4],ymm3[6,7]
5142 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
5143 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm2[3,0],ymm4[7,4],ymm2[7,4]
5144 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5145 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
5146 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5147 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
5148 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5149 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5150 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %ymm0
5151 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %ymm1
5152 ; AVX1-ONLY-NEXT: vmovaps 160(%r10), %ymm3
5153 ; AVX1-ONLY-NEXT: vmovaps 160(%rax), %ymm5
5154 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5155 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5156 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm2[2,0],ymm4[4,5],ymm2[6,4]
5157 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %ymm2
5158 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %ymm4
5159 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
5160 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
5161 ; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %ymm6
5162 ; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %ymm9
5163 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
5164 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5165 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5166 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5167 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5168 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5169 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
5170 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
5171 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5172 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
5173 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5174 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm2[1,0],ymm4[5,4],ymm2[5,4]
5175 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5176 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5177 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5178 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5179 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5180 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm3[1],ymm5[3],ymm3[3]
5181 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5182 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5183 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
5184 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5185 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
5186 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5187 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5188 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5189 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5190 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5191 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
5192 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
5193 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm3[2,3],ymm0[6,4],ymm3[6,7]
5194 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
5195 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm2[3,0],ymm4[7,4],ymm2[7,4]
5196 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5197 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
5198 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5199 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
5200 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5201 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5202 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %ymm0
5203 ; AVX1-ONLY-NEXT: vmovaps 192(%r9), %ymm1
5204 ; AVX1-ONLY-NEXT: vmovaps 192(%r10), %ymm3
5205 ; AVX1-ONLY-NEXT: vmovaps 192(%rax), %ymm5
5206 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5207 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5208 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm2[2,0],ymm4[4,5],ymm2[6,4]
5209 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %ymm2
5210 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %ymm4
5211 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
5212 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
5213 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %ymm6
5214 ; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %ymm9
5215 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
5216 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5217 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5218 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5219 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5220 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5221 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
5222 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
5223 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5224 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
5225 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5226 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm2[1,0],ymm4[5,4],ymm2[5,4]
5227 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5228 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5229 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5230 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5231 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5232 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm3[1],ymm5[3],ymm3[3]
5233 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5234 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5235 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
5236 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5237 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
5238 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5239 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5240 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5241 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5242 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5243 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
5244 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
5245 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm3[2,3],ymm0[6,4],ymm3[6,7]
5246 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
5247 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm2[3,0],ymm4[7,4],ymm2[7,4]
5248 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5249 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
5250 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5251 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
5252 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5253 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5254 ; AVX1-ONLY-NEXT: vmovaps 224(%r8), %ymm0
5255 ; AVX1-ONLY-NEXT: vmovaps 224(%r9), %ymm1
5256 ; AVX1-ONLY-NEXT: vmovaps 224(%r10), %ymm3
5257 ; AVX1-ONLY-NEXT: vmovaps 224(%rax), %ymm5
5258 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm2 = ymm5[0],ymm3[0],ymm5[2],ymm3[2]
5259 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5260 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm4[0,1],ymm2[2,0],ymm4[4,5],ymm2[6,4]
5261 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %ymm2
5262 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %ymm4
5263 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
5264 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
5265 ; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %ymm6
5266 ; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %ymm9
5267 ; AVX1-ONLY-NEXT: vunpcklpd {{.*#+}} ymm10 = ymm9[0],ymm6[0],ymm9[2],ymm6[2]
5268 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5269 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5270 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5271 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5272 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5273 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm7 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
5274 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm1[1,0],ymm0[1,0],ymm1[5,4],ymm0[5,4]
5275 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[2,0],ymm7[2,3],ymm8[6,4],ymm7[6,7]
5276 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
5277 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5278 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,0],ymm2[1,0],ymm4[5,4],ymm2[5,4]
5279 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[2,0,2,3,6,4,6,7]
5280 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5281 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5282 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5283 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5284 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm7 = ymm5[1],ymm3[1],ymm5[3],ymm3[3]
5285 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5286 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm8[0,1],ymm7[2,0],ymm8[4,5],ymm7[6,4]
5287 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
5288 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm8, %xmm8
5289 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} ymm10 = ymm9[1],ymm6[1],ymm9[3],ymm6[3]
5290 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm10[0,1,2,0,4,5,6,4]
5291 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5292 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
5293 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5294 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5295 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
5296 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm1[3,0],ymm0[3,0],ymm1[7,4],ymm0[7,4]
5297 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,0],ymm3[2,3],ymm0[6,4],ymm3[6,7]
5298 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
5299 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm4[3,0],ymm2[3,0],ymm4[7,4],ymm2[7,4]
5300 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5301 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[2,0,2,3,6,4,6,7]
5302 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5303 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
5304 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
5305 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5306 ; AVX1-ONLY-NEXT: vmovaps (%r9), %xmm2
5307 ; AVX1-ONLY-NEXT: vmovaps (%r8), %xmm3
5308 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5309 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm4
5310 ; AVX1-ONLY-NEXT: vmovaps (%rax), %xmm0
5311 ; AVX1-ONLY-NEXT: vmovaps (%r10), %xmm1
5312 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5313 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm6[0,1,0,1]
5314 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5315 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm4[0,1,2,3,4,5],ymm5[6,7]
5316 ; AVX1-ONLY-NEXT: vmovaps (%rcx), %xmm4
5317 ; AVX1-ONLY-NEXT: vmovaps (%rdx), %xmm5
5318 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5319 ; AVX1-ONLY-NEXT: vmovaps (%rsi), %xmm9
5320 ; AVX1-ONLY-NEXT: vmovaps (%rdi), %xmm10
5321 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5322 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5323 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5324 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5325 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5326 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5327 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5328 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5329 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
5330 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm2[1],xmm8[2,3]
5331 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5332 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5333 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5334 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5335 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5336 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5337 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm0[2,2,2,2]
5338 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm1[0,1,2],xmm3[3]
5339 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5340 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm7
5341 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3,4,5],ymm3[6,7]
5342 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm4[2,2,2,2]
5343 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm5[0,1,2],xmm7[3]
5344 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5345 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1,2,3],ymm3[4,5,6,7]
5346 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5347 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm3 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5348 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm3 = xmm6[1],xmm3[1]
5349 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5350 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5351 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm2[2,3,2,3]
5352 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5353 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5354 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
5355 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5356 ; AVX1-ONLY-NEXT: vmovaps 32(%r9), %xmm4
5357 ; AVX1-ONLY-NEXT: vmovaps 32(%r8), %xmm5
5358 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5359 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5360 ; AVX1-ONLY-NEXT: vmovaps 32(%rax), %xmm0
5361 ; AVX1-ONLY-NEXT: vmovaps 32(%r10), %xmm1
5362 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5363 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5364 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5365 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5366 ; AVX1-ONLY-NEXT: vmovaps 32(%rcx), %xmm2
5367 ; AVX1-ONLY-NEXT: vmovaps 32(%rdx), %xmm3
5368 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5369 ; AVX1-ONLY-NEXT: vmovaps 32(%rsi), %xmm9
5370 ; AVX1-ONLY-NEXT: vmovaps 32(%rdi), %xmm10
5371 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5372 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5373 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5374 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5375 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5376 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5377 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5378 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5379 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
5380 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
5381 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5382 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5383 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5384 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5385 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5386 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5387 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
5388 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5389 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5390 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
5391 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
5392 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
5393 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
5394 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5395 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
5396 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5397 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5398 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
5399 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5400 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5401 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
5402 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5403 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5404 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
5405 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5406 ; AVX1-ONLY-NEXT: vmovaps 64(%r9), %xmm4
5407 ; AVX1-ONLY-NEXT: vmovaps 64(%r8), %xmm5
5408 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5409 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5410 ; AVX1-ONLY-NEXT: vmovaps 64(%rax), %xmm0
5411 ; AVX1-ONLY-NEXT: vmovaps 64(%r10), %xmm1
5412 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5413 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5414 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5415 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5416 ; AVX1-ONLY-NEXT: vmovaps 64(%rcx), %xmm2
5417 ; AVX1-ONLY-NEXT: vmovaps 64(%rdx), %xmm3
5418 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5419 ; AVX1-ONLY-NEXT: vmovaps 64(%rsi), %xmm9
5420 ; AVX1-ONLY-NEXT: vmovaps 64(%rdi), %xmm10
5421 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5422 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5423 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5424 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5425 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5426 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5427 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5428 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5429 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
5430 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
5431 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5432 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5433 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5434 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5435 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5436 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5437 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
5438 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5439 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5440 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
5441 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
5442 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
5443 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
5444 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5445 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
5446 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5447 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5448 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
5449 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5450 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5451 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
5452 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5453 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5454 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
5455 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5456 ; AVX1-ONLY-NEXT: vmovaps 96(%r9), %xmm4
5457 ; AVX1-ONLY-NEXT: vmovaps 96(%r8), %xmm5
5458 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5459 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5460 ; AVX1-ONLY-NEXT: vmovaps 96(%rax), %xmm0
5461 ; AVX1-ONLY-NEXT: vmovaps 96(%r10), %xmm1
5462 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5463 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5464 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5465 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5466 ; AVX1-ONLY-NEXT: vmovaps 96(%rcx), %xmm2
5467 ; AVX1-ONLY-NEXT: vmovaps 96(%rdx), %xmm3
5468 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5469 ; AVX1-ONLY-NEXT: vmovaps 96(%rsi), %xmm9
5470 ; AVX1-ONLY-NEXT: vmovaps 96(%rdi), %xmm10
5471 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5472 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5473 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5474 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5475 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5476 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5477 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5478 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5479 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
5480 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
5481 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5482 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5483 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5484 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5485 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5486 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5487 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
5488 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5489 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5490 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
5491 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
5492 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
5493 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
5494 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5495 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
5496 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5497 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5498 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
5499 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5500 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5501 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
5502 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5503 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5504 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
5505 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5506 ; AVX1-ONLY-NEXT: vmovaps 128(%r9), %xmm4
5507 ; AVX1-ONLY-NEXT: vmovaps 128(%r8), %xmm5
5508 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5509 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5510 ; AVX1-ONLY-NEXT: vmovaps 128(%rax), %xmm0
5511 ; AVX1-ONLY-NEXT: vmovaps 128(%r10), %xmm1
5512 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5513 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5514 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5515 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5516 ; AVX1-ONLY-NEXT: vmovaps 128(%rcx), %xmm2
5517 ; AVX1-ONLY-NEXT: vmovaps 128(%rdx), %xmm3
5518 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5519 ; AVX1-ONLY-NEXT: vmovaps 128(%rsi), %xmm9
5520 ; AVX1-ONLY-NEXT: vmovaps 128(%rdi), %xmm10
5521 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5522 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5523 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5524 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5525 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5526 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5527 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5528 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5529 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
5530 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
5531 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5532 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5533 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5534 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5535 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5536 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5537 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
5538 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5539 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5540 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
5541 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
5542 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
5543 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
5544 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5545 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
5546 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5547 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5548 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
5549 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5550 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5551 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
5552 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5553 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5554 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
5555 ; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
5556 ; AVX1-ONLY-NEXT: vmovaps 160(%r9), %xmm4
5557 ; AVX1-ONLY-NEXT: vmovaps 160(%r8), %xmm5
5558 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5559 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5560 ; AVX1-ONLY-NEXT: vmovaps 160(%rax), %xmm0
5561 ; AVX1-ONLY-NEXT: vmovaps 160(%r10), %xmm1
5562 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
5563 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5564 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5565 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5566 ; AVX1-ONLY-NEXT: vmovaps 160(%rcx), %xmm2
5567 ; AVX1-ONLY-NEXT: vmovaps 160(%rdx), %xmm3
5568 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5569 ; AVX1-ONLY-NEXT: vmovaps 160(%rsi), %xmm9
5570 ; AVX1-ONLY-NEXT: vmovaps 160(%rdi), %xmm10
5571 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
5572 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm11 = xmm11[0],xmm8[0]
5573 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm11[0,1,2,3],ymm7[4,5,6,7]
5574 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5575 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm10[1,1,1,1]
5576 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm9[1],xmm7[2,3]
5577 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
5578 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5579 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm5[1,1,1,1]
5580 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm8[0],xmm4[1],xmm8[2,3]
5581 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm0, %ymm8
5582 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0,1,2,3,4,5],ymm6[6,7]
5583 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5584 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5585 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm6 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5586 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5587 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm0[2,2,2,2]
5588 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5589 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5590 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm7
5591 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3,4,5],ymm5[6,7]
5592 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
5593 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1,2],xmm7[3]
5594 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm6[0,1],xmm7[2,3]
5595 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm5[4,5,6,7]
5596 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5597 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5598 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm6[1],xmm2[1]
5599 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5600 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5601 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm4[2,3,2,3]
5602 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5603 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
5604 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
5605 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5606 ; AVX1-ONLY-NEXT: vmovaps 192(%r9), %xmm4
5607 ; AVX1-ONLY-NEXT: vmovaps 192(%r8), %xmm5
5608 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5609 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm2
5610 ; AVX1-ONLY-NEXT: vmovaps 192(%rax), %xmm9
5611 ; AVX1-ONLY-NEXT: vmovaps 192(%r10), %xmm1
5612 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm6 = xmm1[0],xmm9[0],xmm1[1],xmm9[1]
5613 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,1,0,1]
5614 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5615 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm2[0,1,2,3,4,5],ymm3[6,7]
5616 ; AVX1-ONLY-NEXT: vmovaps 192(%rcx), %xmm2
5617 ; AVX1-ONLY-NEXT: vmovaps 192(%rdx), %xmm3
5618 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm8 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
5619 ; AVX1-ONLY-NEXT: vmovaps 192(%rsi), %xmm0
5620 ; AVX1-ONLY-NEXT: vmovaps 192(%rdi), %xmm7
5621 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm15 = xmm7[0],xmm0[0],xmm7[1],xmm0[1]
5622 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm15 = xmm15[0],xmm8[0]
5623 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm15[0,1,2,3],ymm10[4,5,6,7]
5624 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm7[1,1,1,1]
5625 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm0[1],xmm15[2,3]
5626 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm8 = xmm15[0,1],xmm8[2,3]
5627 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
5628 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm5[1,1,1,1]
5629 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm4[1],xmm15[2,3]
5630 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
5631 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm15[0,1,2,3,4,5],ymm6[6,7]
5632 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm8[0,1,2,3],ymm6[4,5,6,7]
5633 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm7[2],xmm0[2],xmm7[3],xmm0[3]
5634 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5635 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm9[2,2,2,2]
5636 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm1[0,1,2],xmm5[3]
5637 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
5638 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm6
5639 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
5640 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
5641 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
5642 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm0[0,1],xmm6[2,3]
5643 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm6[0,1,2,3],ymm5[4,5,6,7]
5644 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5645 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
5646 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm1[2],xmm9[2],xmm1[3],xmm9[3]
5647 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5648 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm4[2,3,2,3]
5649 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
5650 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
5651 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5652 ; AVX1-ONLY-NEXT: vmovaps 224(%r9), %xmm3
5653 ; AVX1-ONLY-NEXT: vmovaps 224(%r8), %xmm9
5654 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm0 = xmm9[0],xmm3[0],xmm9[1],xmm3[1]
5655 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
5656 ; AVX1-ONLY-NEXT: vmovaps 224(%rax), %xmm7
5657 ; AVX1-ONLY-NEXT: vmovaps 224(%r10), %xmm6
5658 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm15 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
5659 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm15[0,1,0,1]
5660 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
5661 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3,4,5],ymm2[6,7]
5662 ; AVX1-ONLY-NEXT: vmovaps 224(%rcx), %xmm5
5663 ; AVX1-ONLY-NEXT: vmovaps 224(%rdx), %xmm4
5664 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
5665 ; AVX1-ONLY-NEXT: vmovaps 224(%rsi), %xmm1
5666 ; AVX1-ONLY-NEXT: vmovaps 224(%rdi), %xmm0
5667 ; AVX1-ONLY-NEXT: vunpcklps {{.*#+}} xmm14 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
5668 ; AVX1-ONLY-NEXT: vmovlhps {{.*#+}} xmm14 = xmm14[0],xmm2[0]
5669 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm14[0,1,2,3],ymm8[4,5,6,7]
5670 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm0[1,1,1,1]
5671 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm14 = xmm14[0],xmm1[1],xmm14[2,3]
5672 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm14[0,1],xmm2[2,3]
5673 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm14
5674 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm9[1,1,1,1]
5675 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm3[1],xmm15[2,3]
5676 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
5677 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm15[0,1,2,3,4,5],ymm14[6,7]
5678 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm14[4,5,6,7]
5679 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
5680 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm9[2],xmm3[2],xmm9[3],xmm3[3]
5681 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm7[2,2,2,2]
5682 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm6[0,1,2],xmm3[3]
5683 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
5684 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm9
5685 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm9[0,1,2,3,4,5],ymm3[6,7]
5686 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm9 = xmm5[2,2,2,2]
5687 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm4[0,1,2],xmm9[3]
5688 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm0[0,1],xmm9[2,3]
5689 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm9[0,1,2,3],ymm3[4,5,6,7]
5690 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm4[2],xmm5[2],xmm4[3],xmm5[3]
5691 ; AVX1-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm0[1],xmm4[1]
5692 ; AVX1-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
5693 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
5694 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[2,3,2,3]
5695 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
5696 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm4[6,7]
5697 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
5698 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5699 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1888(%rax)
5700 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 1856(%rax)
5701 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 1824(%rax)
5702 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 1792(%rax)
5703 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 1632(%rax)
5704 ; AVX1-ONLY-NEXT: vmovaps %ymm11, 1600(%rax)
5705 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 1568(%rax)
5706 ; AVX1-ONLY-NEXT: vmovaps %ymm13, 1536(%rax)
5707 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5708 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
5709 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5710 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1344(%rax)
5711 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5712 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1312(%rax)
5713 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5714 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
5715 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5716 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1120(%rax)
5717 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5718 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
5719 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5720 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
5721 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5722 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
5723 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5724 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
5725 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5726 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
5727 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5728 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
5729 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5730 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
5731 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5732 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
5733 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5734 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
5735 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5736 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
5737 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5738 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
5739 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5740 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
5741 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5742 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
5743 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5744 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
5745 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5746 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
5747 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5748 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
5749 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5750 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
5751 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5752 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
5753 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5754 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
5755 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5756 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 2016(%rax)
5757 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5758 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1984(%rax)
5759 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5760 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1952(%rax)
5761 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5762 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1920(%rax)
5763 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5764 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1760(%rax)
5765 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5766 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1728(%rax)
5767 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5768 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1696(%rax)
5769 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5770 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1664(%rax)
5771 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5772 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
5773 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5774 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
5775 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5776 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
5777 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5778 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
5779 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5780 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
5781 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5782 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
5783 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5784 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
5785 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5786 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 1152(%rax)
5787 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5788 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
5789 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5790 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
5791 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5792 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
5793 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5794 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
5795 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5796 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
5797 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5798 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
5799 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5800 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
5801 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5802 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
5803 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5804 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
5805 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5806 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
5807 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5808 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
5809 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5810 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
5811 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5812 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
5813 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5814 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
5815 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5816 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
5817 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5818 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
5819 ; AVX1-ONLY-NEXT: addq $1672, %rsp # imm = 0x688
5820 ; AVX1-ONLY-NEXT: vzeroupper
5821 ; AVX1-ONLY-NEXT: retq
5823 ; AVX2-ONLY-LABEL: store_i32_stride8_vf64:
5824 ; AVX2-ONLY: # %bb.0:
5825 ; AVX2-ONLY-NEXT: subq $1672, %rsp # imm = 0x688
5826 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5827 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
5828 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %ymm0
5829 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %ymm4
5830 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %ymm1
5831 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %ymm5
5832 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %ymm2
5833 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %ymm6
5834 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %ymm3
5835 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %ymm7
5836 ; AVX2-ONLY-NEXT: vmovaps (%r8), %ymm9
5837 ; AVX2-ONLY-NEXT: vmovaps (%r9), %ymm11
5838 ; AVX2-ONLY-NEXT: vmovaps (%r10), %ymm10
5839 ; AVX2-ONLY-NEXT: vmovaps (%rax), %ymm12
5840 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm8 = ymm10[0],ymm12[0],ymm10[1],ymm12[1],ymm10[4],ymm12[4],ymm10[5],ymm12[5]
5841 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm13 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[4],ymm5[4],ymm4[5],ymm5[5]
5842 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm13, %xmm13
5843 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm14 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
5844 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm15 = ymm14[2,2,2,2]
5845 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm13[0,1],xmm15[2,3]
5846 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm15 = ymm9[0],ymm11[0],ymm9[1],ymm11[1],ymm9[4],ymm11[4],ymm9[5],ymm11[5]
5847 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm15 = ymm15[0],ymm8[0],ymm15[2],ymm8[2]
5848 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm15[4,5,6,7]
5849 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5850 ; AVX2-ONLY-NEXT: vbroadcastss 20(%r8), %ymm13
5851 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm11[5],ymm13[6,7]
5852 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm8[6,7]
5853 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm14, %xmm8
5854 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm14 = ymm4[1,1,1,1,5,5,5,5]
5855 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0],ymm5[1],ymm14[2,3,4],ymm5[5],ymm14[6,7]
5856 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm14, %xmm14
5857 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm14 = xmm14[0,1],xmm8[2,3]
5858 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %ymm8
5859 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1,2,3],ymm13[4,5,6,7]
5860 ; AVX2-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5861 ; AVX2-ONLY-NEXT: vbroadcastss 24(%rax), %ymm13
5862 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm10[0,1,2,3,4,5,6],ymm13[7]
5863 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %ymm10
5864 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm11 = ymm9[2],ymm11[2],ymm9[3],ymm11[3],ymm9[6],ymm11[6],ymm9[7],ymm11[7]
5865 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %ymm9
5866 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[6],ymm5[6],ymm4[7],ymm5[7]
5867 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm5
5868 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm6 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
5869 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm7 = ymm6[2,2,2,2]
5870 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],xmm7[2,3]
5871 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %ymm7
5872 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm11[0,1,2,3,4,5],ymm13[6,7]
5873 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm13[4,5,6,7]
5874 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5875 ; AVX2-ONLY-NEXT: vbroadcastss 28(%r10), %ymm5
5876 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm5[2],ymm12[2],ymm5[3],ymm12[3],ymm5[6],ymm12[6],ymm5[7],ymm12[7]
5877 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm11[1],ymm5[1],ymm11[3],ymm5[3]
5878 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
5879 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,3,2,3,6,7,6,7]
5880 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
5881 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0,1],xmm6[2,3]
5882 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
5883 ; AVX2-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5884 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm4 = ymm9[0],ymm7[0],ymm9[1],ymm7[1],ymm9[4],ymm7[4],ymm9[5],ymm7[5]
5885 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5886 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
5887 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm6 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
5888 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm11 = ymm6[2,2,2,2]
5889 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm5[0,1],xmm11[2,3]
5890 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm8[0],ymm10[0],ymm8[1],ymm10[1],ymm8[4],ymm10[4],ymm8[5],ymm10[5]
5891 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm4[0],ymm11[2],ymm4[2]
5892 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm11[4,5,6,7]
5893 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5894 ; AVX2-ONLY-NEXT: vbroadcastss 52(%r8), %ymm5
5895 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4],ymm10[5],ymm5[6,7]
5896 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm4[6,7]
5897 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm4
5898 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm0[1,1,1,1,5,5,5,5]
5899 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0],ymm1[1],ymm6[2,3,4],ymm1[5],ymm6[6,7]
5900 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
5901 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm4[2,3]
5902 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %ymm4
5903 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3],ymm5[4,5,6,7]
5904 ; AVX2-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5905 ; AVX2-ONLY-NEXT: vbroadcastss 56(%rax), %ymm5
5906 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5,6],ymm5[7]
5907 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %ymm5
5908 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm8 = ymm8[2],ymm10[2],ymm8[3],ymm10[3],ymm8[6],ymm10[6],ymm8[7],ymm10[7]
5909 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %ymm6
5910 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm10 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
5911 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm0
5912 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
5913 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm1 = ymm2[2,2,2,2]
5914 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0,1],xmm1[2,3]
5915 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %ymm0
5916 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5],ymm9[6,7]
5917 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
5918 ; AVX2-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5919 ; AVX2-ONLY-NEXT: vbroadcastss 60(%r10), %ymm1
5920 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm1[2],ymm7[2],ymm1[3],ymm7[3],ymm1[6],ymm7[6],ymm1[7],ymm7[7]
5921 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %ymm1
5922 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm3 = ymm8[1],ymm3[1],ymm8[3],ymm3[3]
5923 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5924 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm10[2,3,2,3,6,7,6,7]
5925 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm7
5926 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm7[0,1],xmm2[2,3]
5927 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
5928 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5929 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[4],ymm5[4],ymm4[5],ymm5[5]
5930 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
5931 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm6[0],ymm0[0],ymm6[1],ymm0[1],ymm6[4],ymm0[4],ymm6[5],ymm0[5]
5932 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm7 = ymm3[2,2,2,2]
5933 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm7[2,3]
5934 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %ymm7
5935 ; AVX2-ONLY-NEXT: vmovaps 64(%r10), %ymm8
5936 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %ymm9
5937 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
5938 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm1[0],ymm7[0],ymm1[1],ymm7[1],ymm1[4],ymm7[4],ymm1[5],ymm7[5]
5939 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
5940 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
5941 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5942 ; AVX2-ONLY-NEXT: vbroadcastss 84(%r8), %ymm2
5943 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
5944 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
5945 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
5946 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm4[1,1,1,1,5,5,5,5]
5947 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm5[1],ymm10[2,3,4],ymm5[5],ymm10[6,7]
5948 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5949 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm10[0,1],xmm3[2,3]
5950 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
5951 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5952 ; AVX2-ONLY-NEXT: vbroadcastss 88(%rax), %ymm2
5953 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
5954 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm1[2],ymm7[2],ymm1[3],ymm7[3],ymm1[6],ymm7[6],ymm1[7],ymm7[7]
5955 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm3 = ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[6],ymm5[6],ymm4[7],ymm5[7]
5956 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm4 = ymm6[2],ymm0[2],ymm6[3],ymm0[3],ymm6[6],ymm0[6],ymm6[7],ymm0[7]
5957 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm0
5958 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm5 = ymm4[2,2,2,2]
5959 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm5[2,3]
5960 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5961 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
5962 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5963 ; AVX2-ONLY-NEXT: vbroadcastss 92(%r10), %ymm0
5964 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm0[2],ymm9[2],ymm0[3],ymm9[3],ymm0[6],ymm9[6],ymm0[7],ymm9[7]
5965 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %ymm0
5966 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm5 = ymm1[1],ymm2[1],ymm1[3],ymm2[3]
5967 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %ymm1
5968 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm2
5969 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,3,2,3,6,7,6,7]
5970 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
5971 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
5972 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %ymm2
5973 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm5[4,5,6,7]
5974 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5975 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm3 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
5976 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
5977 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %ymm4
5978 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
5979 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
5980 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm6[2,3]
5981 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %ymm6
5982 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %ymm7
5983 ; AVX2-ONLY-NEXT: vmovaps 96(%r10), %ymm8
5984 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %ymm9
5985 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
5986 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
5987 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
5988 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm11[4,5,6,7]
5989 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5990 ; AVX2-ONLY-NEXT: vbroadcastss 116(%r8), %ymm3
5991 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm7[5],ymm3[6,7]
5992 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm10[6,7]
5993 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
5994 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
5995 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
5996 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
5997 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
5998 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3],ymm3[4,5,6,7]
5999 ; AVX2-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6000 ; AVX2-ONLY-NEXT: vbroadcastss 120(%rax), %ymm3
6001 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm8[0,1,2,3,4,5,6],ymm3[7]
6002 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
6003 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
6004 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
6005 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm2
6006 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
6007 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,3]
6008 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2,3,4,5],ymm3[6,7]
6009 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
6010 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6011 ; AVX2-ONLY-NEXT: vbroadcastss 124(%r10), %ymm2
6012 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
6013 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
6014 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
6015 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
6016 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
6017 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
6018 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6019 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6020 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %ymm0
6021 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %ymm1
6022 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
6023 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
6024 ; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %ymm3
6025 ; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %ymm4
6026 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
6027 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
6028 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm6[2,3]
6029 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %ymm6
6030 ; AVX2-ONLY-NEXT: vmovaps 128(%r9), %ymm7
6031 ; AVX2-ONLY-NEXT: vmovaps 128(%r10), %ymm8
6032 ; AVX2-ONLY-NEXT: vmovaps 128(%rax), %ymm9
6033 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
6034 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
6035 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
6036 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
6037 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6038 ; AVX2-ONLY-NEXT: vbroadcastss 148(%r8), %ymm2
6039 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
6040 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
6041 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
6042 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
6043 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
6044 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
6045 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
6046 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3],ymm2[4,5,6,7]
6047 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6048 ; AVX2-ONLY-NEXT: vbroadcastss 152(%rax), %ymm2
6049 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
6050 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
6051 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
6052 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
6053 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm3
6054 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
6055 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
6056 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3,4,5],ymm2[6,7]
6057 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
6058 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6059 ; AVX2-ONLY-NEXT: vbroadcastss 156(%r10), %ymm2
6060 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
6061 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
6062 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
6063 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
6064 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
6065 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
6066 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6067 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6068 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %ymm0
6069 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %ymm1
6070 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
6071 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
6072 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %ymm3
6073 ; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %ymm4
6074 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
6075 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
6076 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm6[2,3]
6077 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %ymm6
6078 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %ymm7
6079 ; AVX2-ONLY-NEXT: vmovaps 160(%r10), %ymm8
6080 ; AVX2-ONLY-NEXT: vmovaps 160(%rax), %ymm9
6081 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
6082 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
6083 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
6084 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
6085 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6086 ; AVX2-ONLY-NEXT: vbroadcastss 180(%r8), %ymm2
6087 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
6088 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
6089 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
6090 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
6091 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
6092 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
6093 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
6094 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3],ymm2[4,5,6,7]
6095 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6096 ; AVX2-ONLY-NEXT: vbroadcastss 184(%rax), %ymm2
6097 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
6098 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
6099 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
6100 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
6101 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm3
6102 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
6103 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
6104 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3,4,5],ymm2[6,7]
6105 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
6106 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6107 ; AVX2-ONLY-NEXT: vbroadcastss 188(%r10), %ymm2
6108 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
6109 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
6110 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
6111 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
6112 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
6113 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
6114 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6115 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6116 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %ymm0
6117 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %ymm1
6118 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
6119 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
6120 ; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %ymm3
6121 ; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %ymm4
6122 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
6123 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
6124 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm6[2,3]
6125 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %ymm6
6126 ; AVX2-ONLY-NEXT: vmovaps 192(%r9), %ymm7
6127 ; AVX2-ONLY-NEXT: vmovaps 192(%r10), %ymm8
6128 ; AVX2-ONLY-NEXT: vmovaps 192(%rax), %ymm9
6129 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
6130 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
6131 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
6132 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
6133 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6134 ; AVX2-ONLY-NEXT: vbroadcastss 212(%r8), %ymm2
6135 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
6136 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
6137 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
6138 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
6139 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
6140 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
6141 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
6142 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3],ymm2[4,5,6,7]
6143 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6144 ; AVX2-ONLY-NEXT: vbroadcastss 216(%rax), %ymm2
6145 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
6146 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
6147 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
6148 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
6149 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm3
6150 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
6151 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
6152 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3,4,5],ymm2[6,7]
6153 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
6154 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6155 ; AVX2-ONLY-NEXT: vbroadcastss 220(%r10), %ymm2
6156 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
6157 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
6158 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
6159 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
6160 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
6161 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
6162 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6163 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6164 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %ymm0
6165 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %ymm1
6166 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
6167 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
6168 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %ymm3
6169 ; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %ymm4
6170 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
6171 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm6 = ymm5[2,2,2,2]
6172 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0,1],xmm6[2,3]
6173 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %ymm6
6174 ; AVX2-ONLY-NEXT: vmovaps 224(%r9), %ymm7
6175 ; AVX2-ONLY-NEXT: vmovaps 224(%r10), %ymm8
6176 ; AVX2-ONLY-NEXT: vmovaps 224(%rax), %ymm9
6177 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[4],ymm9[4],ymm8[5],ymm9[5]
6178 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} ymm11 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
6179 ; AVX2-ONLY-NEXT: vunpcklpd {{.*#+}} ymm11 = ymm11[0],ymm10[0],ymm11[2],ymm10[2]
6180 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
6181 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6182 ; AVX2-ONLY-NEXT: vbroadcastss 244(%r8), %ymm2
6183 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm7[5],ymm2[6,7]
6184 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm10[6,7]
6185 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
6186 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm10 = ymm0[1,1,1,1,5,5,5,5]
6187 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm1[1],ymm10[2,3,4],ymm1[5],ymm10[6,7]
6188 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm10, %xmm10
6189 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
6190 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3],ymm2[4,5,6,7]
6191 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6192 ; AVX2-ONLY-NEXT: vbroadcastss 248(%rax), %ymm2
6193 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0,1,2,3,4,5,6],ymm2[7]
6194 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm5 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
6195 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
6196 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm1 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
6197 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm3
6198 ; AVX2-ONLY-NEXT: vpermpd {{.*#+}} ymm4 = ymm1[2,2,2,2]
6199 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
6200 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1,2,3,4,5],ymm2[6,7]
6201 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
6202 ; AVX2-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6203 ; AVX2-ONLY-NEXT: vbroadcastss 252(%r10), %ymm2
6204 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} ymm2 = ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[6],ymm9[6],ymm2[7],ymm9[7]
6205 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} ymm2 = ymm5[1],ymm2[1],ymm5[3],ymm2[3]
6206 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
6207 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7]
6208 ; AVX2-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
6209 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
6210 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
6211 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6212 ; AVX2-ONLY-NEXT: vmovaps (%rcx), %xmm0
6213 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6214 ; AVX2-ONLY-NEXT: vmovaps (%rdx), %xmm1
6215 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6216 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6217 ; AVX2-ONLY-NEXT: vmovaps (%rsi), %xmm4
6218 ; AVX2-ONLY-NEXT: vmovaps (%rdi), %xmm5
6219 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6220 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6221 ; AVX2-ONLY-NEXT: vmovaps (%rax), %xmm2
6222 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
6223 ; AVX2-ONLY-NEXT: vmovaps (%r10), %xmm3
6224 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
6225 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6226 ; AVX2-ONLY-NEXT: vmovaps (%r9), %xmm8
6227 ; AVX2-ONLY-NEXT: vmovaps (%r8), %xmm9
6228 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6229 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6230 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6231 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6232 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6233 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6234 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6235 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6236 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6237 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6238 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6239 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6240 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6241 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6242 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6243 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6244 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6245 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6246 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6247 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6248 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
6249 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
6250 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6251 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6252 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6253 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6254 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6255 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6256 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6257 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6258 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6259 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6260 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6261 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6262 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6263 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6264 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6265 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6266 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6267 ; AVX2-ONLY-NEXT: vmovaps 32(%rcx), %xmm0
6268 ; AVX2-ONLY-NEXT: vmovaps 32(%rdx), %xmm1
6269 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6270 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6271 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6272 ; AVX2-ONLY-NEXT: vmovaps 32(%rsi), %xmm4
6273 ; AVX2-ONLY-NEXT: vmovaps 32(%rdi), %xmm5
6274 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6275 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6276 ; AVX2-ONLY-NEXT: vmovaps 32(%r10), %xmm2
6277 ; AVX2-ONLY-NEXT: vmovaps 32(%rax), %xmm3
6278 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm7
6279 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm8
6280 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6281 ; AVX2-ONLY-NEXT: vmovaps 32(%r9), %xmm8
6282 ; AVX2-ONLY-NEXT: vmovaps 32(%r8), %xmm9
6283 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6284 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6285 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6286 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6287 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6288 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6289 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6290 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6291 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6292 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6293 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
6294 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6295 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6296 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6297 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6298 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6299 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6300 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6301 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6302 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6303 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm3[2,2,2,2]
6304 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm2[0,1,2],xmm6[3]
6305 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6306 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6307 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6308 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6309 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6310 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6311 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6312 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6313 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6314 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6315 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
6316 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6317 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6318 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6319 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6320 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6321 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6322 ; AVX2-ONLY-NEXT: vmovaps 64(%rcx), %xmm0
6323 ; AVX2-ONLY-NEXT: vmovaps 64(%rdx), %xmm1
6324 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6325 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6326 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6327 ; AVX2-ONLY-NEXT: vmovaps 64(%rsi), %xmm4
6328 ; AVX2-ONLY-NEXT: vmovaps 64(%rdi), %xmm5
6329 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6330 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6331 ; AVX2-ONLY-NEXT: vmovaps 64(%r10), %xmm2
6332 ; AVX2-ONLY-NEXT: vmovaps 64(%rax), %xmm3
6333 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm7
6334 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm8
6335 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6336 ; AVX2-ONLY-NEXT: vmovaps 64(%r9), %xmm8
6337 ; AVX2-ONLY-NEXT: vmovaps 64(%r8), %xmm9
6338 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6339 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6340 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6341 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6342 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6343 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6344 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6345 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6346 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6347 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6348 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
6349 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6350 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6351 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6352 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6353 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6354 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6355 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6356 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6357 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6358 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm3[2,2,2,2]
6359 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm2[0,1,2],xmm6[3]
6360 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6361 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6362 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6363 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6364 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6365 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6366 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6367 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6368 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6369 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6370 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
6371 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6372 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6373 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6374 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6375 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6376 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6377 ; AVX2-ONLY-NEXT: vmovaps 96(%rcx), %xmm0
6378 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6379 ; AVX2-ONLY-NEXT: vmovaps 96(%rdx), %xmm1
6380 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6381 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6382 ; AVX2-ONLY-NEXT: vmovaps 96(%rsi), %xmm4
6383 ; AVX2-ONLY-NEXT: vmovaps 96(%rdi), %xmm5
6384 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6385 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6386 ; AVX2-ONLY-NEXT: vmovaps 96(%rax), %xmm2
6387 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
6388 ; AVX2-ONLY-NEXT: vmovaps 96(%r10), %xmm3
6389 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
6390 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6391 ; AVX2-ONLY-NEXT: vmovaps 96(%r9), %xmm8
6392 ; AVX2-ONLY-NEXT: vmovaps 96(%r8), %xmm9
6393 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6394 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6395 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6396 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6397 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6398 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6399 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6400 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6401 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6402 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6403 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6404 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6405 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6406 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6407 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6408 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6409 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6410 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6411 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6412 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6413 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
6414 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
6415 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6416 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6417 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6418 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6419 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6420 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6421 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6422 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6423 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6424 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6425 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6426 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6427 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6428 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6429 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6430 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6431 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6432 ; AVX2-ONLY-NEXT: vmovaps 128(%rcx), %xmm0
6433 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6434 ; AVX2-ONLY-NEXT: vmovaps 128(%rdx), %xmm1
6435 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6436 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6437 ; AVX2-ONLY-NEXT: vmovaps 128(%rsi), %xmm4
6438 ; AVX2-ONLY-NEXT: vmovaps 128(%rdi), %xmm5
6439 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6440 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6441 ; AVX2-ONLY-NEXT: vmovaps 128(%rax), %xmm2
6442 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
6443 ; AVX2-ONLY-NEXT: vmovaps 128(%r10), %xmm3
6444 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
6445 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6446 ; AVX2-ONLY-NEXT: vmovaps 128(%r9), %xmm8
6447 ; AVX2-ONLY-NEXT: vmovaps 128(%r8), %xmm9
6448 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6449 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6450 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6451 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6452 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6453 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6454 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6455 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6456 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6457 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6458 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6459 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6460 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6461 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6462 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6463 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6464 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6465 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6466 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6467 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6468 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
6469 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
6470 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6471 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6472 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6473 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6474 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6475 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6476 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6477 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6478 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6479 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6480 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6481 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6482 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6483 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6484 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6485 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6486 ; AVX2-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
6487 ; AVX2-ONLY-NEXT: vmovaps 160(%rcx), %xmm0
6488 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6489 ; AVX2-ONLY-NEXT: vmovaps 160(%rdx), %xmm1
6490 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6491 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6492 ; AVX2-ONLY-NEXT: vmovaps 160(%rsi), %xmm4
6493 ; AVX2-ONLY-NEXT: vmovaps 160(%rdi), %xmm5
6494 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6495 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1],xmm2[2,3]
6496 ; AVX2-ONLY-NEXT: vmovaps 160(%rax), %xmm2
6497 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
6498 ; AVX2-ONLY-NEXT: vmovaps 160(%r10), %xmm3
6499 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
6500 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6501 ; AVX2-ONLY-NEXT: vmovaps 160(%r9), %xmm8
6502 ; AVX2-ONLY-NEXT: vmovaps 160(%r8), %xmm9
6503 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
6504 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6505 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6506 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6507 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6508 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6509 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm5[1,1,1,1]
6510 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0],xmm4[1],xmm6[2,3]
6511 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6512 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3]
6513 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6514 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6515 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm9[1,1,1,1]
6516 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm10 = xmm10[0],xmm8[1],xmm10[2,3]
6517 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6518 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6519 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4,5,6,7]
6520 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6521 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6522 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6523 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
6524 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
6525 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6526 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6527 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6528 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6529 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6530 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6531 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6532 ; AVX2-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6533 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6534 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6535 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6536 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6537 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6538 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6539 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6540 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6541 ; AVX2-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6542 ; AVX2-ONLY-NEXT: vmovaps 192(%rcx), %xmm0
6543 ; AVX2-ONLY-NEXT: vbroadcastss %xmm0, %xmm2
6544 ; AVX2-ONLY-NEXT: vmovaps 192(%rdx), %xmm1
6545 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm3
6546 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6547 ; AVX2-ONLY-NEXT: vmovaps 192(%rsi), %xmm4
6548 ; AVX2-ONLY-NEXT: vmovaps 192(%rdi), %xmm5
6549 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6550 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm9 = xmm3[0,1],xmm2[2,3]
6551 ; AVX2-ONLY-NEXT: vmovaps 192(%rax), %xmm2
6552 ; AVX2-ONLY-NEXT: vbroadcastss %xmm2, %xmm7
6553 ; AVX2-ONLY-NEXT: vmovaps 192(%r10), %xmm3
6554 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm8
6555 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
6556 ; AVX2-ONLY-NEXT: vmovaps 192(%r9), %xmm8
6557 ; AVX2-ONLY-NEXT: vmovaps 192(%r8), %xmm6
6558 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm10 = xmm6[0],xmm8[0],xmm6[1],xmm8[1]
6559 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm10
6560 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
6561 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5],ymm7[6,7]
6562 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm9[0,1,2,3],ymm7[4,5,6,7]
6563 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm5[1,1,1,1]
6564 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm4[1],xmm7[2,3]
6565 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6566 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm9[2,3]
6567 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm9 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6568 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm9
6569 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm6[1,1,1,1]
6570 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm15 = xmm15[0],xmm8[1],xmm15[2,3]
6571 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm15
6572 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm15[0,1,2,3,4,5],ymm9[6,7]
6573 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm7[0,1,2,3],ymm9[4,5,6,7]
6574 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm4 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6575 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm6[2],xmm8[2],xmm6[3],xmm8[3]
6576 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm6 = xmm2[2,2,2,2]
6577 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm6 = xmm3[0,1,2],xmm6[3]
6578 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
6579 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm7
6580 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
6581 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm0[2,2,2,2]
6582 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm1[0,1,2],xmm7[3]
6583 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm4[0,1],xmm7[2,3]
6584 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm7[0,1,2,3],ymm6[4,5,6,7]
6585 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6586 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm0 = xmm4[1],xmm0[1]
6587 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6588 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6589 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm5[2,3,2,3]
6590 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
6591 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
6592 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6593 ; AVX2-ONLY-NEXT: vmovaps 224(%rcx), %xmm6
6594 ; AVX2-ONLY-NEXT: vbroadcastss %xmm6, %xmm0
6595 ; AVX2-ONLY-NEXT: vmovaps 224(%rdx), %xmm5
6596 ; AVX2-ONLY-NEXT: vbroadcastss %xmm5, %xmm1
6597 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6598 ; AVX2-ONLY-NEXT: vmovaps 224(%rsi), %xmm4
6599 ; AVX2-ONLY-NEXT: vmovaps 224(%rdi), %xmm2
6600 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm3 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
6601 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0,1],xmm1[2,3]
6602 ; AVX2-ONLY-NEXT: vmovaps 224(%rax), %xmm3
6603 ; AVX2-ONLY-NEXT: vmovaps 224(%r10), %xmm1
6604 ; AVX2-ONLY-NEXT: vbroadcastss %xmm3, %xmm15
6605 ; AVX2-ONLY-NEXT: vbroadcastss %xmm1, %xmm14
6606 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1]
6607 ; AVX2-ONLY-NEXT: vmovaps 224(%r9), %xmm15
6608 ; AVX2-ONLY-NEXT: vmovaps 224(%r8), %xmm0
6609 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
6610 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm13
6611 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm14
6612 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5],ymm14[6,7]
6613 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm7[0,1,2,3],ymm13[4,5,6,7]
6614 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[1,1,1,1]
6615 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0],xmm4[1],xmm7[2,3]
6616 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
6617 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm13[2,3]
6618 ; AVX2-ONLY-NEXT: vunpcklps {{.*#+}} xmm13 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
6619 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm13
6620 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm12 = xmm0[1,1,1,1]
6621 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0],xmm15[1],xmm12[2,3]
6622 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm12
6623 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5],ymm13[6,7]
6624 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm12[4,5,6,7]
6625 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm2 = xmm2[2],xmm4[2],xmm2[3],xmm4[3]
6626 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
6627 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm3[2,2,2,2]
6628 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm1[0,1,2],xmm4[3]
6629 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
6630 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm12
6631 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm12[0,1,2,3,4,5],ymm4[6,7]
6632 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm12 = xmm6[2,2,2,2]
6633 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm5[0,1,2],xmm12[3]
6634 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm2[0,1],xmm12[2,3]
6635 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm4[4,5,6,7]
6636 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm5 = xmm5[2],xmm6[2],xmm5[3],xmm6[3]
6637 ; AVX2-ONLY-NEXT: vunpckhpd {{.*#+}} xmm2 = xmm2[1],xmm5[1]
6638 ; AVX2-ONLY-NEXT: vunpckhps {{.*#+}} xmm1 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
6639 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
6640 ; AVX2-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[2,3,2,3]
6641 ; AVX2-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
6642 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
6643 ; AVX2-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
6644 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
6645 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1888(%rax)
6646 ; AVX2-ONLY-NEXT: vmovaps %ymm4, 1856(%rax)
6647 ; AVX2-ONLY-NEXT: vmovaps %ymm7, 1824(%rax)
6648 ; AVX2-ONLY-NEXT: vmovaps %ymm14, 1792(%rax)
6649 ; AVX2-ONLY-NEXT: vmovaps %ymm8, 1632(%rax)
6650 ; AVX2-ONLY-NEXT: vmovaps %ymm9, 1600(%rax)
6651 ; AVX2-ONLY-NEXT: vmovaps %ymm10, 1568(%rax)
6652 ; AVX2-ONLY-NEXT: vmovaps %ymm11, 1536(%rax)
6653 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6654 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1376(%rax)
6655 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6656 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1344(%rax)
6657 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6658 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1312(%rax)
6659 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6660 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1280(%rax)
6661 ; AVX2-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
6662 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1120(%rax)
6663 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6664 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1088(%rax)
6665 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6666 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1056(%rax)
6667 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6668 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1024(%rax)
6669 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6670 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 864(%rax)
6671 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6672 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
6673 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6674 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
6675 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6676 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
6677 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6678 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
6679 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6680 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
6681 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6682 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
6683 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6684 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
6685 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6686 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
6687 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6688 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
6689 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6690 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
6691 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6692 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
6693 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6694 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
6695 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6696 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
6697 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6698 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
6699 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6700 ; AVX2-ONLY-NEXT: vmovaps %ymm0, (%rax)
6701 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6702 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 2016(%rax)
6703 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6704 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1984(%rax)
6705 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6706 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1952(%rax)
6707 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6708 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1920(%rax)
6709 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6710 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1760(%rax)
6711 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6712 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1728(%rax)
6713 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6714 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1696(%rax)
6715 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6716 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1664(%rax)
6717 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6718 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1504(%rax)
6719 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6720 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1472(%rax)
6721 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6722 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1440(%rax)
6723 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6724 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1408(%rax)
6725 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6726 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1248(%rax)
6727 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6728 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1216(%rax)
6729 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6730 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1184(%rax)
6731 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6732 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 1152(%rax)
6733 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6734 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
6735 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6736 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 960(%rax)
6737 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6738 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 928(%rax)
6739 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6740 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 896(%rax)
6741 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6742 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
6743 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6744 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
6745 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6746 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
6747 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6748 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
6749 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6750 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
6751 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6752 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
6753 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6754 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
6755 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6756 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
6757 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6758 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
6759 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6760 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
6761 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6762 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
6763 ; AVX2-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6764 ; AVX2-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
6765 ; AVX2-ONLY-NEXT: addq $1672, %rsp # imm = 0x688
6766 ; AVX2-ONLY-NEXT: vzeroupper
6767 ; AVX2-ONLY-NEXT: retq
6769 ; AVX512F-LABEL: store_i32_stride8_vf64:
6771 ; AVX512F-NEXT: subq $6216, %rsp # imm = 0x1848
6772 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
6773 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
6774 ; AVX512F-NEXT: vmovdqa64 (%r10), %zmm5
6775 ; AVX512F-NEXT: vmovdqa64 64(%r10), %zmm4
6776 ; AVX512F-NEXT: vmovdqa64 128(%r10), %zmm2
6777 ; AVX512F-NEXT: vmovdqa64 (%rax), %zmm1
6778 ; AVX512F-NEXT: vmovdqa64 64(%rax), %zmm0
6779 ; AVX512F-NEXT: vmovdqa64 128(%rax), %zmm30
6780 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
6781 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm7
6782 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm6
6783 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm3, %zmm7
6784 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6785 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
6786 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm7
6787 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm5, %zmm6
6788 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6789 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
6790 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm8
6791 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm6, %zmm8
6792 ; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6793 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
6794 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm9
6795 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm8, %zmm9
6796 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6797 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm9
6798 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
6799 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm10
6800 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm8, %zmm10
6801 ; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6802 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm10
6803 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
6804 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm11
6805 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm8, %zmm11
6806 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6807 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm11
6808 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
6809 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm13
6810 ; AVX512F-NEXT: vmovdqa64 %zmm7, %zmm12
6811 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm8, %zmm13
6812 ; AVX512F-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6813 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm7
6814 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
6815 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm8, %zmm12
6816 ; AVX512F-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6817 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6818 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm1
6819 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6820 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6821 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm5, %zmm1
6822 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6823 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6824 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm6, %zmm1
6825 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6826 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6827 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
6828 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6829 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6830 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
6831 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6832 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6833 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
6834 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6835 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm1
6836 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm1
6837 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6838 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm8, %zmm4
6839 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6840 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6841 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm3, %zmm0
6842 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6843 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6844 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm5, %zmm0
6845 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6846 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6847 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm6, %zmm0
6848 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6849 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6850 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm9, %zmm0
6851 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6852 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6853 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm10, %zmm0
6854 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6855 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6856 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm11, %zmm0
6857 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6858 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
6859 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm7, %zmm0
6860 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6861 ; AVX512F-NEXT: vpermt2d %zmm30, %zmm8, %zmm2
6862 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6863 ; AVX512F-NEXT: vmovdqa64 192(%r10), %zmm1
6864 ; AVX512F-NEXT: vmovdqa64 192(%rax), %zmm0
6865 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm3
6866 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6867 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm5
6868 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6869 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm6
6870 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6871 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm9
6872 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6873 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm10
6874 ; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6875 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm11
6876 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6877 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm1, %zmm7
6878 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6879 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm8, %zmm1
6880 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6881 ; AVX512F-NEXT: vmovdqa64 (%r8), %zmm26
6882 ; AVX512F-NEXT: vmovdqa64 (%r9), %zmm0
6883 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
6884 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm2
6885 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm2
6886 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6887 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm2
6888 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
6889 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm3
6890 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm3
6891 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6892 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm3
6893 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
6894 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm4
6895 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm4
6896 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6897 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4
6898 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
6899 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm5
6900 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
6901 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6902 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
6903 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
6904 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm6
6905 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
6906 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6907 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm6
6908 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
6909 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm7
6910 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm7
6911 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6912 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm7
6913 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
6914 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm8
6915 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm8
6916 ; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6917 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm8
6918 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
6919 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm26
6920 ; AVX512F-NEXT: vmovdqa64 64(%r8), %zmm25
6921 ; AVX512F-NEXT: vmovdqa64 64(%r9), %zmm0
6922 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6923 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
6924 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6925 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6926 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm9
6927 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6928 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6929 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm9
6930 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6931 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6932 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm5, %zmm9
6933 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6934 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6935 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm6, %zmm9
6936 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6937 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6938 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm9
6939 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6940 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm9
6941 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm8, %zmm9
6942 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6943 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm25
6944 ; AVX512F-NEXT: vmovdqa64 128(%r8), %zmm22
6945 ; AVX512F-NEXT: vmovdqa64 128(%r9), %zmm0
6946 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6947 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
6948 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6949 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6950 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm9
6951 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6952 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6953 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm9
6954 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6955 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6956 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm5, %zmm9
6957 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6958 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6959 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm6, %zmm9
6960 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6961 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6962 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm9
6963 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6964 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm9
6965 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm8, %zmm9
6966 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6967 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm22
6968 ; AVX512F-NEXT: vmovdqa64 192(%r8), %zmm27
6969 ; AVX512F-NEXT: vmovdqa64 192(%r9), %zmm0
6970 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm2
6971 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6972 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm3
6973 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6974 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm4
6975 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6976 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm5
6977 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6978 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm6
6979 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6980 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm7
6981 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6982 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm27, %zmm8
6983 ; AVX512F-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6984 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm27
6985 ; AVX512F-NEXT: vmovdqa64 (%rdx), %zmm8
6986 ; AVX512F-NEXT: vmovdqa64 (%rcx), %zmm0
6987 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
6988 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm2
6989 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm2
6990 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6991 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm2
6992 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
6993 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm3
6994 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm3
6995 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6996 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm3
6997 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
6998 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm4
6999 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm4
7000 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7001 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm4
7002 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
7003 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm5
7004 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
7005 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7006 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm5
7007 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
7008 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm6
7009 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7010 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7011 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm7
7012 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
7013 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm6
7014 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7015 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7016 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm9
7017 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
7018 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm6
7019 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7020 ; AVX512F-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7021 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm10
7022 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
7023 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm8
7024 ; AVX512F-NEXT: vmovdqa64 64(%rdx), %zmm6
7025 ; AVX512F-NEXT: vmovdqa64 64(%rcx), %zmm0
7026 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7027 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm2, %zmm11
7028 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7029 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7030 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm11
7031 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7032 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7033 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
7034 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7035 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7036 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm5, %zmm11
7037 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7038 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7039 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm11
7040 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7041 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7042 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm11
7043 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7044 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11
7045 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm11
7046 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7047 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7048 ; AVX512F-NEXT: vmovdqa64 128(%rdx), %zmm13
7049 ; AVX512F-NEXT: vmovdqa64 128(%rcx), %zmm0
7050 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7051 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm2, %zmm11
7052 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7053 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7054 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm11
7055 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7056 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7057 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
7058 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7059 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7060 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm5, %zmm11
7061 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7062 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7063 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm11
7064 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7065 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7066 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm11
7067 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7068 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm11
7069 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm11
7070 ; AVX512F-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7071 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm13
7072 ; AVX512F-NEXT: vmovdqa64 192(%rdx), %zmm17
7073 ; AVX512F-NEXT: vmovdqa64 192(%rcx), %zmm0
7074 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm2
7075 ; AVX512F-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7076 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm3
7077 ; AVX512F-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7078 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm4
7079 ; AVX512F-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7080 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm5
7081 ; AVX512F-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7082 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm7
7083 ; AVX512F-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7084 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm9
7085 ; AVX512F-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7086 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm17, %zmm10
7087 ; AVX512F-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7088 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm1, %zmm17
7089 ; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm5
7090 ; AVX512F-NEXT: vmovdqa64 (%rsi), %zmm0
7091 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
7092 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7093 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm14, %zmm1
7094 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7095 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm11 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
7096 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7097 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
7098 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7099 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm10 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
7100 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7101 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
7102 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7103 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm9 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
7104 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7105 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
7106 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7107 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm7 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
7108 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7109 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm1
7110 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7111 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm4 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
7112 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7113 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm1
7114 ; AVX512F-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
7115 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm3 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
7116 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
7117 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm1
7118 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7119 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm31 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
7120 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm31, %zmm5
7121 ; AVX512F-NEXT: vmovdqa64 64(%rdi), %zmm12
7122 ; AVX512F-NEXT: vmovdqa64 64(%rsi), %zmm0
7123 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm1
7124 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm14, %zmm1
7125 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7126 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm1
7127 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
7128 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7129 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm1
7130 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
7131 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7132 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm1
7133 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
7134 ; AVX512F-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7135 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm30
7136 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm30
7137 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm29
7138 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm29
7139 ; AVX512F-NEXT: vmovdqa64 %zmm12, %zmm28
7140 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm28
7141 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm31, %zmm12
7142 ; AVX512F-NEXT: vmovdqa64 128(%rdi), %zmm15
7143 ; AVX512F-NEXT: vmovdqa64 128(%rsi), %zmm0
7144 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm24
7145 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm14, %zmm24
7146 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm23
7147 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm11, %zmm23
7148 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm21
7149 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm10, %zmm21
7150 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm20
7151 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm9, %zmm20
7152 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm19
7153 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm7, %zmm19
7154 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm18
7155 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm4, %zmm18
7156 ; AVX512F-NEXT: vmovdqa64 %zmm15, %zmm16
7157 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm3, %zmm16
7158 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm31, %zmm15
7159 ; AVX512F-NEXT: vmovdqa64 192(%rdi), %zmm2
7160 ; AVX512F-NEXT: vmovdqa64 192(%rsi), %zmm0
7161 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm14
7162 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm11
7163 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm10
7164 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm9
7165 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm7
7166 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm4
7167 ; AVX512F-NEXT: vpermi2d %zmm0, %zmm2, %zmm3
7168 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm31, %zmm2
7169 ; AVX512F-NEXT: movb $-120, %al
7170 ; AVX512F-NEXT: kmovw %eax, %k1
7171 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7172 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7173 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7174 ; AVX512F-NEXT: movb $34, %al
7175 ; AVX512F-NEXT: kmovw %eax, %k2
7176 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7177 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7178 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7179 ; AVX512F-NEXT: movb $-52, %al
7180 ; AVX512F-NEXT: kmovw %eax, %k3
7181 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7182 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7183 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7184 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7185 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7186 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7187 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7188 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7189 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7190 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7191 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7192 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7193 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7194 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7195 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7196 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7197 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7198 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7199 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7200 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7201 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7202 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7203 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7204 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7205 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7206 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7207 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7208 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7209 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7210 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7211 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7212 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7213 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7214 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7215 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7216 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7217 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7218 ; AVX512F-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7219 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7220 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7221 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7222 ; AVX512F-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
7223 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7224 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7225 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7226 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7227 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7228 ; AVX512F-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7229 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7230 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7231 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7232 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
7233 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm5 {%k2}
7234 ; AVX512F-NEXT: vmovdqa64 %zmm26, %zmm5 {%k3}
7235 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7236 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7237 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7238 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7239 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7240 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
7241 ; AVX512F-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7242 ; AVX512F-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7243 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7244 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7245 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7246 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
7247 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7248 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
7249 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm26 {%k3}
7250 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7251 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7252 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7253 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7254 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7255 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7256 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm1 {%k3}
7257 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7258 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7259 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7260 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7261 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7262 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7263 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm31 {%k3}
7264 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7265 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7266 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7267 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7268 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
7269 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm30 {%k3}
7270 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7271 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7272 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7273 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7274 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7275 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm29 {%k3}
7276 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7277 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7278 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7279 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7280 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
7281 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm28 {%k3}
7282 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7283 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
7284 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm12 {%k2}
7285 ; AVX512F-NEXT: vmovdqa64 %zmm25, %zmm12 {%k3}
7286 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7287 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7288 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7289 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7290 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm24 {%k2}
7291 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm24 {%k3}
7292 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7293 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7294 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7295 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7296 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
7297 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm23 {%k3}
7298 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7299 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7300 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7301 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7302 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
7303 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm21 {%k3}
7304 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7305 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7306 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7307 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7308 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
7309 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm20 {%k3}
7310 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7311 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7312 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7313 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7314 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm19 {%k2}
7315 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm19 {%k3}
7316 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7317 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7318 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7319 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7320 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7321 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm18 {%k3}
7322 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7323 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7324 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7325 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7326 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm16 {%k2}
7327 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm16 {%k3}
7328 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7329 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
7330 ; AVX512F-NEXT: vmovdqa64 %zmm13, %zmm15 {%k2}
7331 ; AVX512F-NEXT: vmovdqa64 %zmm22, %zmm15 {%k3}
7332 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7333 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7334 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7335 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7336 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7337 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm14 {%k3}
7338 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7339 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7340 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7341 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7342 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7343 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
7344 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7345 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7346 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7347 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7348 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
7349 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm10 {%k3}
7350 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7351 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7352 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7353 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7354 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
7355 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm9 {%k3}
7356 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7357 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7358 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7359 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7360 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
7361 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm7 {%k3}
7362 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7363 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7364 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7365 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7366 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
7367 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm4 {%k3}
7368 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7369 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7370 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7371 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7372 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
7373 ; AVX512F-NEXT: vmovdqa64 %zmm6, %zmm3 {%k3}
7374 ; AVX512F-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7375 ; AVX512F-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
7376 ; AVX512F-NEXT: vmovdqa64 %zmm17, %zmm2 {%k2}
7377 ; AVX512F-NEXT: vmovdqa64 %zmm27, %zmm2 {%k3}
7378 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
7379 ; AVX512F-NEXT: vmovdqa64 %zmm2, 1984(%rax)
7380 ; AVX512F-NEXT: vmovdqa64 %zmm3, 1920(%rax)
7381 ; AVX512F-NEXT: vmovdqa64 %zmm4, 1856(%rax)
7382 ; AVX512F-NEXT: vmovdqa64 %zmm7, 1792(%rax)
7383 ; AVX512F-NEXT: vmovdqa64 %zmm9, 1728(%rax)
7384 ; AVX512F-NEXT: vmovdqa64 %zmm10, 1664(%rax)
7385 ; AVX512F-NEXT: vmovdqa64 %zmm11, 1600(%rax)
7386 ; AVX512F-NEXT: vmovdqa64 %zmm14, 1536(%rax)
7387 ; AVX512F-NEXT: vmovdqa64 %zmm15, 1472(%rax)
7388 ; AVX512F-NEXT: vmovdqa64 %zmm16, 1408(%rax)
7389 ; AVX512F-NEXT: vmovdqa64 %zmm18, 1344(%rax)
7390 ; AVX512F-NEXT: vmovdqa64 %zmm19, 1280(%rax)
7391 ; AVX512F-NEXT: vmovdqa64 %zmm20, 1216(%rax)
7392 ; AVX512F-NEXT: vmovdqa64 %zmm21, 1152(%rax)
7393 ; AVX512F-NEXT: vmovdqa64 %zmm23, 1088(%rax)
7394 ; AVX512F-NEXT: vmovdqa64 %zmm24, 1024(%rax)
7395 ; AVX512F-NEXT: vmovdqa64 %zmm12, 960(%rax)
7396 ; AVX512F-NEXT: vmovdqa64 %zmm28, 896(%rax)
7397 ; AVX512F-NEXT: vmovdqa64 %zmm29, 832(%rax)
7398 ; AVX512F-NEXT: vmovdqa64 %zmm30, 768(%rax)
7399 ; AVX512F-NEXT: vmovdqa64 %zmm31, 704(%rax)
7400 ; AVX512F-NEXT: vmovdqa64 %zmm1, 640(%rax)
7401 ; AVX512F-NEXT: vmovdqa64 %zmm26, 576(%rax)
7402 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7403 ; AVX512F-NEXT: vmovaps %zmm0, 512(%rax)
7404 ; AVX512F-NEXT: vmovdqa64 %zmm5, 448(%rax)
7405 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7406 ; AVX512F-NEXT: vmovaps %zmm0, 384(%rax)
7407 ; AVX512F-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
7408 ; AVX512F-NEXT: vmovaps %zmm0, 320(%rax)
7409 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7410 ; AVX512F-NEXT: vmovaps %zmm0, 256(%rax)
7411 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7412 ; AVX512F-NEXT: vmovaps %zmm0, 192(%rax)
7413 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7414 ; AVX512F-NEXT: vmovaps %zmm0, 128(%rax)
7415 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7416 ; AVX512F-NEXT: vmovaps %zmm0, 64(%rax)
7417 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7418 ; AVX512F-NEXT: vmovaps %zmm0, (%rax)
7419 ; AVX512F-NEXT: addq $6216, %rsp # imm = 0x1848
7420 ; AVX512F-NEXT: vzeroupper
7421 ; AVX512F-NEXT: retq
7423 ; AVX512BW-LABEL: store_i32_stride8_vf64:
7424 ; AVX512BW: # %bb.0:
7425 ; AVX512BW-NEXT: subq $6216, %rsp # imm = 0x1848
7426 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7427 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
7428 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm5
7429 ; AVX512BW-NEXT: vmovdqa64 64(%r10), %zmm4
7430 ; AVX512BW-NEXT: vmovdqa64 128(%r10), %zmm2
7431 ; AVX512BW-NEXT: vmovdqa64 (%rax), %zmm1
7432 ; AVX512BW-NEXT: vmovdqa64 64(%rax), %zmm0
7433 ; AVX512BW-NEXT: vmovdqa64 128(%rax), %zmm30
7434 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,u,u,u,u,0,16,u,u,u,u,u,u,1,17>
7435 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm7
7436 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm6
7437 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm3, %zmm7
7438 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7439 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,u,u,u,u,2,18,u,u,u,u,u,u,3,19>
7440 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm7
7441 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm5, %zmm6
7442 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7443 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,u,u,4,20,u,u,u,u,u,u,5,21>
7444 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm8
7445 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm6, %zmm8
7446 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7447 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,6,22,u,u,u,u,u,u,7,23>
7448 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm9
7449 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm8, %zmm9
7450 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7451 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm9
7452 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,8,24,u,u,u,u,u,u,9,25>
7453 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm10
7454 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm8, %zmm10
7455 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7456 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm10
7457 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,10,26,u,u,u,u,u,u,11,27>
7458 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm11
7459 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm8, %zmm11
7460 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7461 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm11
7462 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,12,28,u,u,u,u,u,u,13,29>
7463 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm13
7464 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm12
7465 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm8, %zmm13
7466 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7467 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm7
7468 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,u,u,14,30,u,u,u,u,u,u,15,31>
7469 ; AVX512BW-NEXT: vpermt2d %zmm1, %zmm8, %zmm12
7470 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7471 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7472 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm1
7473 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7474 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7475 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm5, %zmm1
7476 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7477 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7478 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm6, %zmm1
7479 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7480 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7481 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
7482 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7483 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7484 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
7485 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7486 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7487 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
7488 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7489 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
7490 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm1
7491 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7492 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm8, %zmm4
7493 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7494 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7495 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm3, %zmm0
7496 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7497 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7498 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm5, %zmm0
7499 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7500 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7501 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm6, %zmm0
7502 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7503 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7504 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm9, %zmm0
7505 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7506 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7507 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm10, %zmm0
7508 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7509 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7510 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm11, %zmm0
7511 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7512 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm0
7513 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm7, %zmm0
7514 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7515 ; AVX512BW-NEXT: vpermt2d %zmm30, %zmm8, %zmm2
7516 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7517 ; AVX512BW-NEXT: vmovdqa64 192(%r10), %zmm1
7518 ; AVX512BW-NEXT: vmovdqa64 192(%rax), %zmm0
7519 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm3
7520 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7521 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm5
7522 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7523 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm6
7524 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7525 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm9
7526 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7527 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm10
7528 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7529 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm11
7530 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7531 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm1, %zmm7
7532 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7533 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm8, %zmm1
7534 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7535 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm26
7536 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm0
7537 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,0,16,u,u,u,u,u,u,1,17,u,u>
7538 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm2
7539 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm2
7540 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7541 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
7542 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,2,18,u,u,u,u,u,u,3,19,u,u>
7543 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm3
7544 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm3
7545 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7546 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
7547 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,4,20,u,u,u,u,u,u,5,21,u,u>
7548 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm4
7549 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm4
7550 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7551 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
7552 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,6,22,u,u,u,u,u,u,7,23,u,u>
7553 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm5
7554 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
7555 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7556 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
7557 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,8,24,u,u,u,u,u,u,9,25,u,u>
7558 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm6
7559 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7560 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7561 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm6
7562 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,10,26,u,u,u,u,u,u,11,27,u,u>
7563 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm7
7564 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm7
7565 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7566 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
7567 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,12,28,u,u,u,u,u,u,13,29,u,u>
7568 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm8
7569 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm8
7570 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7571 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm8
7572 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,u,u,14,30,u,u,u,u,u,u,15,31,u,u>
7573 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm26
7574 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm25
7575 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm0
7576 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7577 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
7578 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7579 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7580 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm9
7581 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7582 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7583 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm9
7584 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7585 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7586 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm5, %zmm9
7587 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7588 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7589 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm6, %zmm9
7590 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7591 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7592 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm9
7593 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7594 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm9
7595 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm8, %zmm9
7596 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7597 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm25
7598 ; AVX512BW-NEXT: vmovdqa64 128(%r8), %zmm22
7599 ; AVX512BW-NEXT: vmovdqa64 128(%r9), %zmm0
7600 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7601 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm2, %zmm9
7602 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7603 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7604 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm9
7605 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7606 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7607 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm9
7608 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7609 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7610 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm5, %zmm9
7611 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7612 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7613 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm6, %zmm9
7614 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7615 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7616 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm9
7617 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7618 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm9
7619 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm8, %zmm9
7620 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7621 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm22
7622 ; AVX512BW-NEXT: vmovdqa64 192(%r8), %zmm27
7623 ; AVX512BW-NEXT: vmovdqa64 192(%r9), %zmm0
7624 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm2
7625 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7626 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm3
7627 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7628 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm4
7629 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7630 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm5
7631 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7632 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm6
7633 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7634 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm7
7635 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7636 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm27, %zmm8
7637 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7638 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm27
7639 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm8
7640 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm0
7641 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,0,16,u,u,u,u,u,u,1,17,u,u,u,u>
7642 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm2
7643 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm2
7644 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7645 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
7646 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,2,18,u,u,u,u,u,u,3,19,u,u,u,u>
7647 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm3
7648 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm3
7649 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7650 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
7651 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,4,20,u,u,u,u,u,u,5,21,u,u,u,u>
7652 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm4
7653 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm4
7654 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7655 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
7656 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,6,22,u,u,u,u,u,u,7,23,u,u,u,u>
7657 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm5
7658 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm5
7659 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7660 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
7661 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,8,24,u,u,u,u,u,u,9,25,u,u,u,u>
7662 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm6
7663 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7664 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7665 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
7666 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,10,26,u,u,u,u,u,u,11,27,u,u,u,u>
7667 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm6
7668 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7669 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7670 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9
7671 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,12,28,u,u,u,u,u,u,13,29,u,u,u,u>
7672 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm6
7673 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7674 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7675 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm10
7676 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,14,30,u,u,u,u,u,u,15,31,u,u,u,u>
7677 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm8
7678 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm6
7679 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm0
7680 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7681 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm2, %zmm11
7682 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7683 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7684 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm11
7685 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7686 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7687 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
7688 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7689 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7690 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm5, %zmm11
7691 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7692 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7693 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm11
7694 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7695 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7696 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm11
7697 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7698 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11
7699 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm11
7700 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7701 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm6
7702 ; AVX512BW-NEXT: vmovdqa64 128(%rdx), %zmm13
7703 ; AVX512BW-NEXT: vmovdqa64 128(%rcx), %zmm0
7704 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7705 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm2, %zmm11
7706 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7707 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7708 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm11
7709 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7710 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7711 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm11
7712 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7713 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7714 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm5, %zmm11
7715 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7716 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7717 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm11
7718 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7719 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7720 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm11
7721 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7722 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm11
7723 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm11
7724 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7725 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm13
7726 ; AVX512BW-NEXT: vmovdqa64 192(%rdx), %zmm17
7727 ; AVX512BW-NEXT: vmovdqa64 192(%rcx), %zmm0
7728 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm2
7729 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7730 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm3
7731 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7732 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm4
7733 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7734 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm5
7735 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7736 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm7
7737 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7738 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm9
7739 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7740 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm17, %zmm10
7741 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7742 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm1, %zmm17
7743 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm5
7744 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm0
7745 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <0,16,u,u,u,u,u,u,1,17,u,u,u,u,u,u>
7746 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7747 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm14, %zmm1
7748 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7749 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <2,18,u,u,u,u,u,u,3,19,u,u,u,u,u,u>
7750 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7751 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
7752 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7753 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <4,20,u,u,u,u,u,u,5,21,u,u,u,u,u,u>
7754 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7755 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
7756 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7757 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <6,22,u,u,u,u,u,u,7,23,u,u,u,u,u,u>
7758 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7759 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
7760 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7761 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <8,24,u,u,u,u,u,u,9,25,u,u,u,u,u,u>
7762 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7763 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm1
7764 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7765 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <10,26,u,u,u,u,u,u,11,27,u,u,u,u,u,u>
7766 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7767 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm1
7768 ; AVX512BW-NEXT: vmovdqu64 %zmm1, (%rsp) # 64-byte Spill
7769 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <12,28,u,u,u,u,u,u,13,29,u,u,u,u,u,u>
7770 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm1
7771 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm1
7772 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7773 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm31 = <14,30,u,u,u,u,u,u,15,31,u,u,u,u,u,u>
7774 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm31, %zmm5
7775 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm12
7776 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm0
7777 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm1
7778 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm14, %zmm1
7779 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7780 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm1
7781 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm11, %zmm1
7782 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7783 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm1
7784 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm1
7785 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7786 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm1
7787 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm1
7788 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7789 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm30
7790 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm30
7791 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm29
7792 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm29
7793 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm28
7794 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm28
7795 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm31, %zmm12
7796 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm15
7797 ; AVX512BW-NEXT: vmovdqa64 128(%rsi), %zmm0
7798 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm24
7799 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm14, %zmm24
7800 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm23
7801 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm11, %zmm23
7802 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm21
7803 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm10, %zmm21
7804 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm20
7805 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm9, %zmm20
7806 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm19
7807 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm7, %zmm19
7808 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm18
7809 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm4, %zmm18
7810 ; AVX512BW-NEXT: vmovdqa64 %zmm15, %zmm16
7811 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm3, %zmm16
7812 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm31, %zmm15
7813 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm2
7814 ; AVX512BW-NEXT: vmovdqa64 192(%rsi), %zmm0
7815 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm14
7816 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm11
7817 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm10
7818 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm9
7819 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm7
7820 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm4
7821 ; AVX512BW-NEXT: vpermi2d %zmm0, %zmm2, %zmm3
7822 ; AVX512BW-NEXT: vpermt2d %zmm0, %zmm31, %zmm2
7823 ; AVX512BW-NEXT: movb $-120, %al
7824 ; AVX512BW-NEXT: kmovd %eax, %k1
7825 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7826 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7827 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7828 ; AVX512BW-NEXT: movb $34, %al
7829 ; AVX512BW-NEXT: kmovd %eax, %k2
7830 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7831 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7832 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7833 ; AVX512BW-NEXT: movb $-52, %al
7834 ; AVX512BW-NEXT: kmovd %eax, %k3
7835 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7836 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7837 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7838 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7839 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7840 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7841 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7842 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7843 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7844 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7845 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7846 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7847 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7848 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7849 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7850 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7851 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7852 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7853 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7854 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7855 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7856 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7857 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7858 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7859 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7860 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7861 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7862 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7863 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7864 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7865 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7866 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7867 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7868 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7869 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7870 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7871 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7872 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
7873 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7874 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7875 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7876 ; AVX512BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
7877 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7878 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7879 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7880 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7881 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7882 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm0 {%k2}
7883 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7884 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7885 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7886 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k1}
7887 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm5 {%k2}
7888 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm5 {%k3}
7889 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7890 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7891 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k1}
7892 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7893 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7894 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm0 {%k2}
7895 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k3}
7896 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7897 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7898 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7899 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7900 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
7901 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7902 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm26 {%k2}
7903 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm26 {%k3}
7904 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7905 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7906 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7907 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7908 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7909 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm1 {%k2}
7910 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm1 {%k3}
7911 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7912 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7913 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7914 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7915 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
7916 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm31 {%k2}
7917 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm31 {%k3}
7918 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7919 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7920 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7921 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7922 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm30 {%k2}
7923 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm30 {%k3}
7924 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7925 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7926 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7927 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7928 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm29 {%k2}
7929 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm29 {%k3}
7930 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7931 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7932 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k1}
7933 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7934 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm28 {%k2}
7935 ; AVX512BW-NEXT: vmovdqa64 %zmm8, %zmm28 {%k3}
7936 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7937 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm25 {%k1}
7938 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm12 {%k2}
7939 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm12 {%k3}
7940 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7941 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7942 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7943 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7944 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm24 {%k2}
7945 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm24 {%k3}
7946 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7947 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7948 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7949 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7950 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm23 {%k2}
7951 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm23 {%k3}
7952 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7953 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7954 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7955 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7956 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm21 {%k2}
7957 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm21 {%k3}
7958 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7959 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7960 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7961 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7962 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm20 {%k2}
7963 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm20 {%k3}
7964 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7965 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7966 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7967 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7968 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm19 {%k2}
7969 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm19 {%k3}
7970 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7971 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7972 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7973 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7974 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k2}
7975 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm18 {%k3}
7976 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7977 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7978 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7979 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7980 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm16 {%k2}
7981 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm16 {%k3}
7982 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7983 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm22 {%k1}
7984 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm15 {%k2}
7985 ; AVX512BW-NEXT: vmovdqa64 %zmm22, %zmm15 {%k3}
7986 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7987 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7988 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7989 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7990 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14 {%k2}
7991 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm14 {%k3}
7992 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7993 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7994 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
7995 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7996 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11 {%k2}
7997 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm11 {%k3}
7998 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7999 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8000 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
8001 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8002 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k2}
8003 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm10 {%k3}
8004 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8005 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8006 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
8007 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8008 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9 {%k2}
8009 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm9 {%k3}
8010 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8011 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8012 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
8013 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8014 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k2}
8015 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm7 {%k3}
8016 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8017 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8018 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
8019 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8020 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k2}
8021 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm4 {%k3}
8022 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8023 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
8024 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k1}
8025 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8026 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k2}
8027 ; AVX512BW-NEXT: vmovdqa64 %zmm6, %zmm3 {%k3}
8028 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8029 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm27 {%k1}
8030 ; AVX512BW-NEXT: vmovdqa64 %zmm17, %zmm2 {%k2}
8031 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm2 {%k3}
8032 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8033 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 1984(%rax)
8034 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 1920(%rax)
8035 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 1856(%rax)
8036 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 1792(%rax)
8037 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 1728(%rax)
8038 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 1664(%rax)
8039 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 1600(%rax)
8040 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 1536(%rax)
8041 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 1472(%rax)
8042 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1408(%rax)
8043 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 1344(%rax)
8044 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 1280(%rax)
8045 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 1216(%rax)
8046 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 1152(%rax)
8047 ; AVX512BW-NEXT: vmovdqa64 %zmm23, 1088(%rax)
8048 ; AVX512BW-NEXT: vmovdqa64 %zmm24, 1024(%rax)
8049 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 960(%rax)
8050 ; AVX512BW-NEXT: vmovdqa64 %zmm28, 896(%rax)
8051 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 832(%rax)
8052 ; AVX512BW-NEXT: vmovdqa64 %zmm30, 768(%rax)
8053 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 704(%rax)
8054 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 640(%rax)
8055 ; AVX512BW-NEXT: vmovdqa64 %zmm26, 576(%rax)
8056 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8057 ; AVX512BW-NEXT: vmovaps %zmm0, 512(%rax)
8058 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 448(%rax)
8059 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8060 ; AVX512BW-NEXT: vmovaps %zmm0, 384(%rax)
8061 ; AVX512BW-NEXT: vmovups (%rsp), %zmm0 # 64-byte Reload
8062 ; AVX512BW-NEXT: vmovaps %zmm0, 320(%rax)
8063 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8064 ; AVX512BW-NEXT: vmovaps %zmm0, 256(%rax)
8065 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8066 ; AVX512BW-NEXT: vmovaps %zmm0, 192(%rax)
8067 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8068 ; AVX512BW-NEXT: vmovaps %zmm0, 128(%rax)
8069 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8070 ; AVX512BW-NEXT: vmovaps %zmm0, 64(%rax)
8071 ; AVX512BW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8072 ; AVX512BW-NEXT: vmovaps %zmm0, (%rax)
8073 ; AVX512BW-NEXT: addq $6216, %rsp # imm = 0x1848
8074 ; AVX512BW-NEXT: vzeroupper
8075 ; AVX512BW-NEXT: retq
8076 %in.vec0 = load <64 x i32>, ptr %in.vecptr0, align 64
8077 %in.vec1 = load <64 x i32>, ptr %in.vecptr1, align 64
8078 %in.vec2 = load <64 x i32>, ptr %in.vecptr2, align 64
8079 %in.vec3 = load <64 x i32>, ptr %in.vecptr3, align 64
8080 %in.vec4 = load <64 x i32>, ptr %in.vecptr4, align 64
8081 %in.vec5 = load <64 x i32>, ptr %in.vecptr5, align 64
8082 %in.vec6 = load <64 x i32>, ptr %in.vecptr6, align 64
8083 %in.vec7 = load <64 x i32>, ptr %in.vecptr7, align 64
8084 %1 = shufflevector <64 x i32> %in.vec0, <64 x i32> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
8085 %2 = shufflevector <64 x i32> %in.vec2, <64 x i32> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
8086 %3 = shufflevector <64 x i32> %in.vec4, <64 x i32> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
8087 %4 = shufflevector <64 x i32> %in.vec6, <64 x i32> %in.vec7, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
8088 %5 = shufflevector <128 x i32> %1, <128 x i32> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
8089 %6 = shufflevector <128 x i32> %3, <128 x i32> %4, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
8090 %7 = shufflevector <256 x i32> %5, <256 x i32> %6, <512 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447, i32 448, i32 449, i32 450, i32 451, i32 452, i32 453, i32 454, i32 455, i32 456, i32 457, i32 458, i32 459, i32 460, i32 461, i32 462, i32 463, i32 464, i32 465, i32 466, i32 467, i32 468, i32 469, i32 470, i32 471, i32 472, i32 473, i32 474, i32 475, i32 476, i32 477, i32 478, i32 479, i32 480, i32 481, i32 482, i32 483, i32 484, i32 485, i32 486, i32 487, i32 488, i32 489, i32 490, i32 491, i32 492, i32 493, i32 494, i32 495, i32 496, i32 497, i32 498, i32 499, i32 500, i32 501, i32 502, i32 503, i32 504, i32 505, i32 506, i32 507, i32 508, i32 509, i32 510, i32 511>
8091 %interleaved.vec = shufflevector <512 x i32> %7, <512 x i32> poison, <512 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 448, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 449, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 450, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 451, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 452, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 453, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 454, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 455, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 456, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 457, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 458, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 459, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 460, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 461, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 462, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 463, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 464, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 465, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 466, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 467, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 468, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 469, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 470, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 471, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 472, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 473, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 474, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 475, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 476, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 477, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 478, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 479, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 480, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 481, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 482, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 483, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 484, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 485, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 486, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 487, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 488, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 489, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 490, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 491, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 492, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 493, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 494, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 495, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 496, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 497, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 498, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 499, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 500, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 501, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 502, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 503, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 504, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 505, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 506, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 507, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 508, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 509, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 510, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447, i32 511>
8092 store <512 x i32> %interleaved.vec, ptr %out.vec, align 64
8095 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
8100 ; AVX2-FAST-PERLANE: {{.*}}
8102 ; AVX512-FAST: {{.*}}
8103 ; AVX512-SLOW: {{.*}}
8104 ; AVX512BW-FAST: {{.*}}
8105 ; AVX512BW-ONLY-FAST: {{.*}}
8106 ; AVX512BW-ONLY-SLOW: {{.*}}
8107 ; AVX512BW-SLOW: {{.*}}
8108 ; AVX512DQ-FAST: {{.*}}
8109 ; AVX512DQ-SLOW: {{.*}}
8110 ; AVX512DQBW-FAST: {{.*}}
8111 ; AVX512DQBW-SLOW: {{.*}}
8112 ; AVX512F-FAST: {{.*}}
8113 ; AVX512F-ONLY-FAST: {{.*}}
8114 ; AVX512F-ONLY-SLOW: {{.*}}
8115 ; AVX512F-SLOW: {{.*}}
8118 ; FALLBACK10: {{.*}}
8119 ; FALLBACK11: {{.*}}
8120 ; FALLBACK12: {{.*}}