1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by LoopVectorizer for interleaved stores.
18 define void @store_i16_stride8_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i16_stride8_vf2:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
24 ; SSE-NEXT: movdqa (%rdi), %xmm0
25 ; SSE-NEXT: movdqa (%rdx), %xmm1
26 ; SSE-NEXT: movdqa (%r8), %xmm2
27 ; SSE-NEXT: movdqa (%r11), %xmm3
28 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
29 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
30 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
31 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
32 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],mem[0],xmm3[1],mem[1]
33 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
34 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,2,2,3]
35 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,2,1,3,4,5,6,7]
36 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,1,2,0]
37 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,6,4,7,5]
38 ; SSE-NEXT: movsd {{.*#+}} xmm3 = xmm1[0],xmm3[1]
39 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
40 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,3,1,4,5,6,7]
41 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,1,1,3]
42 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,5,7]
43 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
44 ; SSE-NEXT: movapd %xmm1, 16(%rax)
45 ; SSE-NEXT: movapd %xmm3, (%rax)
48 ; AVX1-ONLY-LABEL: store_i16_stride8_vf2:
50 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
51 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
52 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
53 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
54 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
55 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
56 ; AVX1-ONLY-NEXT: vmovdqa (%r11), %xmm3
57 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
58 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
59 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
60 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
61 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm3[0],mem[0],xmm3[1],mem[1]
62 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm2[0]
63 ; AVX1-ONLY-NEXT: vpxor %xmm2, %xmm2, %xmm2
64 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0],xmm2[1],xmm1[2],xmm2[3],xmm1[4],xmm2[5],xmm1[6],xmm2[7]
65 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm0[0],xmm2[1],xmm0[2],xmm2[3],xmm0[4],xmm2[5],xmm0[6],xmm2[7]
66 ; AVX1-ONLY-NEXT: vpackusdw %xmm3, %xmm2, %xmm2
67 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm1
68 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm0, %xmm0
69 ; AVX1-ONLY-NEXT: vpackusdw %xmm1, %xmm0, %xmm0
70 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 16(%rax)
71 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rax)
72 ; AVX1-ONLY-NEXT: retq
74 ; AVX2-ONLY-LABEL: store_i16_stride8_vf2:
76 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
77 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
78 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
79 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
80 ; AVX2-ONLY-NEXT: vmovdqa (%rsi), %xmm1
81 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm2
82 ; AVX2-ONLY-NEXT: vmovdqa (%rcx), %xmm3
83 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r11), %ymm3, %ymm3
84 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm2
85 ; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
86 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
87 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
88 ; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
89 ; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
90 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,2,3,6,7,10,11,14,15,16,17,20,21,24,25,28,29,18,19,22,23,26,27,30,31]
91 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
92 ; AVX2-ONLY-NEXT: vmovdqa %ymm0, (%rax)
93 ; AVX2-ONLY-NEXT: vzeroupper
94 ; AVX2-ONLY-NEXT: retq
96 ; AVX512F-LABEL: store_i16_stride8_vf2:
98 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
99 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
100 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
101 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
102 ; AVX512F-NEXT: vmovdqa (%rsi), %xmm1
103 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm2
104 ; AVX512F-NEXT: vmovdqa (%rcx), %xmm3
105 ; AVX512F-NEXT: vinserti128 $1, (%r11), %ymm3, %ymm3
106 ; AVX512F-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm2
107 ; AVX512F-NEXT: vpunpckldq {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
108 ; AVX512F-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
109 ; AVX512F-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
110 ; AVX512F-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
111 ; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
112 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,2,3,6,7,10,11,14,15,16,17,20,21,24,25,28,29,18,19,22,23,26,27,30,31]
113 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
114 ; AVX512F-NEXT: vmovdqa %ymm0, (%rax)
115 ; AVX512F-NEXT: vzeroupper
118 ; AVX512BW-LABEL: store_i16_stride8_vf2:
120 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
121 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
122 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
123 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
124 ; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
125 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm2
126 ; AVX512BW-NEXT: vmovdqa (%rcx), %xmm3
127 ; AVX512BW-NEXT: vinserti128 $1, (%r11), %ymm3, %ymm3
128 ; AVX512BW-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm2
129 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
130 ; AVX512BW-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
131 ; AVX512BW-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
132 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
133 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,16,18,8,10,24,26,1,3,17,19,9,11,25,27]
134 ; AVX512BW-NEXT: vpermi2w %ymm2, %ymm0, %ymm1
135 ; AVX512BW-NEXT: vmovdqa %ymm1, (%rax)
136 ; AVX512BW-NEXT: vzeroupper
137 ; AVX512BW-NEXT: retq
138 %in.vec0 = load <2 x i16>, ptr %in.vecptr0, align 64
139 %in.vec1 = load <2 x i16>, ptr %in.vecptr1, align 64
140 %in.vec2 = load <2 x i16>, ptr %in.vecptr2, align 64
141 %in.vec3 = load <2 x i16>, ptr %in.vecptr3, align 64
142 %in.vec4 = load <2 x i16>, ptr %in.vecptr4, align 64
143 %in.vec5 = load <2 x i16>, ptr %in.vecptr5, align 64
144 %in.vec6 = load <2 x i16>, ptr %in.vecptr6, align 64
145 %in.vec7 = load <2 x i16>, ptr %in.vecptr7, align 64
146 %1 = shufflevector <2 x i16> %in.vec0, <2 x i16> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
147 %2 = shufflevector <2 x i16> %in.vec2, <2 x i16> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
148 %3 = shufflevector <2 x i16> %in.vec4, <2 x i16> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
149 %4 = shufflevector <2 x i16> %in.vec6, <2 x i16> %in.vec7, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
150 %5 = shufflevector <4 x i16> %1, <4 x i16> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
151 %6 = shufflevector <4 x i16> %3, <4 x i16> %4, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
152 %7 = shufflevector <8 x i16> %5, <8 x i16> %6, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
153 %interleaved.vec = shufflevector <16 x i16> %7, <16 x i16> poison, <16 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 14, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13, i32 15>
154 store <16 x i16> %interleaved.vec, ptr %out.vec, align 64
158 define void @store_i16_stride8_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
159 ; SSE-LABEL: store_i16_stride8_vf4:
161 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
162 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
163 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
164 ; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
165 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
166 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
167 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
168 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
169 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
170 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
171 ; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
172 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
173 ; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
174 ; SSE-NEXT: movq {{.*#+}} xmm4 = mem[0],zero
175 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
176 ; SSE-NEXT: movdqa %xmm0, %xmm4
177 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
178 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[0,0,0,0]
179 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[0,0,0,0]
180 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm5[2],xmm6[3],xmm5[3]
181 ; SSE-NEXT: movsd {{.*#+}} xmm6 = xmm4[0],xmm6[1]
182 ; SSE-NEXT: movdqa %xmm2, %xmm4
183 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
184 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,1,1]
185 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[1,1,1,1]
186 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
187 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm4[2,3]
188 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[3,3,3,3]
189 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
190 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[2,2,2,2]
191 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm2[2,2,2,2]
192 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm5[2],xmm8[3],xmm5[3]
193 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm0[0],xmm8[1]
194 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
195 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,3,3,3]
196 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
197 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
198 ; SSE-NEXT: movaps %xmm4, 48(%rax)
199 ; SSE-NEXT: movapd %xmm8, 32(%rax)
200 ; SSE-NEXT: movaps %xmm7, 16(%rax)
201 ; SSE-NEXT: movapd %xmm6, (%rax)
204 ; AVX1-ONLY-LABEL: store_i16_stride8_vf4:
205 ; AVX1-ONLY: # %bb.0:
206 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
207 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
208 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
209 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
210 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
211 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
212 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm1[0],xmm0[0]
213 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
214 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
215 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
216 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm7 = xmm3[0],xmm0[0]
217 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm5 = mem[0],zero
218 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm6 = mem[0],zero
219 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm6[0],xmm5[0]
220 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm9 = mem[0],zero
221 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm10 = mem[0],zero
222 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm11 = xmm10[0],xmm9[0]
223 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[3,1,2,3]
224 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm12[0,1,3,1,4,5,6,7]
225 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm8[3,1,2,3]
226 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm15 = xmm14[0,1,3,1,4,5,6,7]
227 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm13 = xmm15[0],xmm13[0],xmm15[1],xmm13[1]
228 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm7[3,1,2,3]
229 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm15[3,1,2,3,4,5,6,7]
230 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[3,1,2,3]
231 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[3,1,2,3,4,5,6,7]
232 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
233 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm13[4,5,6,7]
234 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm12[0,1,2,0,4,5,6,7]
235 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm14[0,1,2,0,4,5,6,7]
236 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm12[0],xmm2[0],xmm12[1],xmm2[1]
237 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm15[2,0,2,3,4,5,6,7]
238 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
239 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1]
240 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm2[4,5,6,7]
241 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
242 ; AVX1-ONLY-NEXT: vpxor %xmm1, %xmm1, %xmm1
243 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm11[0],xmm1[1,2,3],xmm11[4],xmm1[5,6,7]
244 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm1[1,2,3],xmm8[4],xmm1[5,6,7]
245 ; AVX1-ONLY-NEXT: vpackusdw %xmm2, %xmm8, %xmm2
246 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm1[1,2,3],xmm7[4],xmm1[5,6,7]
247 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0],xmm1[1,2,3],xmm4[4],xmm1[5,6,7]
248 ; AVX1-ONLY-NEXT: vpackusdw %xmm7, %xmm1, %xmm1
249 ; AVX1-ONLY-NEXT: vpackusdw %xmm2, %xmm1, %xmm1
250 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
251 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
252 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
253 ; AVX1-ONLY-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
254 ; AVX1-ONLY-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1]
255 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
256 ; AVX1-ONLY-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
257 ; AVX1-ONLY-NEXT: # xmm4 = xmm4[0],mem[0],xmm4[1],mem[1]
258 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,3,2,3,4,5,6,7]
259 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[1,3,2,3,4,5,6,7]
260 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
261 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2,3],xmm2[4,5,6,7]
262 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
263 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 16(%rax)
264 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, (%rax)
265 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
266 ; AVX1-ONLY-NEXT: vzeroupper
267 ; AVX1-ONLY-NEXT: retq
269 ; AVX2-SLOW-LABEL: store_i16_stride8_vf4:
270 ; AVX2-SLOW: # %bb.0:
271 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
272 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
273 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
274 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
275 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
276 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
277 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
278 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
279 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
280 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
281 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
282 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
283 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
284 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
285 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
286 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
287 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
288 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[0,1,2,0,4,5,6,4]
289 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,6,4,7,5,8,9,10,11,14,12,15,13]
290 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
291 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm3[0,1,2,0,4,5,6,4]
292 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,7,5,6,4,8,9,10,11,15,13,14,12]
293 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm4[3],ymm2[4,5],ymm4[6],ymm2[7]
294 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[0,2,2,3,4,6,6,7]
295 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[0,2,1,3,4,5,6,7,8,10,9,11,12,13,14,15]
296 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
297 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm5[0,2,2,3,4,6,6,7]
298 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm6[1,3,0,2,4,5,6,7,9,11,8,10,12,13,14,15]
299 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7]
300 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
301 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,5,7]
302 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,6,5,7,8,9,10,11,12,14,13,15]
303 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,1,1,3,4,5,5,7]
304 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,7,4,6,8,9,10,11,13,15,12,14]
305 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2],ymm3[3],ymm1[4,5],ymm3[6],ymm1[7]
306 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[3,1,2,3,7,5,6,7]
307 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[2,0,3,1,4,5,6,7,10,8,11,9,12,13,14,15]
308 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[3,1,2,3,7,5,6,7]
309 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[3,1,2,0,4,5,6,7,11,9,10,8,12,13,14,15]
310 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6,7]
311 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
312 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 32(%rax)
313 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, (%rax)
314 ; AVX2-SLOW-NEXT: vzeroupper
315 ; AVX2-SLOW-NEXT: retq
317 ; AVX2-FAST-LABEL: store_i16_stride8_vf4:
318 ; AVX2-FAST: # %bb.0:
319 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
320 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
321 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
322 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
323 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
324 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
325 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
326 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
327 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
328 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
329 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
330 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
331 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
332 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
333 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
334 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
335 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
336 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,2,4,6,0,2,4,6]
337 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
338 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm3
339 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,0,1,4,5,8,9,12,13,u,u,u,u,u,u,u,u,2,3,6,7,10,11,14,15>
340 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
341 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm2
342 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <0,1,4,5,8,9,12,13,u,u,u,u,u,u,u,u,2,3,6,7,10,11,14,15,u,u,u,u,u,u,u,u>
343 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
344 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
345 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [1,3,5,7,1,3,5,7]
346 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
347 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
348 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm1, %ymm1
349 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
350 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
351 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
352 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 32(%rax)
353 ; AVX2-FAST-NEXT: vmovdqa %ymm2, (%rax)
354 ; AVX2-FAST-NEXT: vzeroupper
355 ; AVX2-FAST-NEXT: retq
357 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride8_vf4:
358 ; AVX2-FAST-PERLANE: # %bb.0:
359 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
360 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
361 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r11
362 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
363 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
364 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
365 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
366 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
367 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
368 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
369 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
370 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
371 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
372 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
373 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
374 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
375 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
376 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm1[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,26,27]
377 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
378 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u]
379 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm4[3],ymm2[4,5],ymm4[6],ymm2[7]
380 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u]
381 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
382 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm5[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u,u,u,u,u]
383 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7]
384 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
385 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,30,31]
386 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
387 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2],ymm3[3],ymm1[4,5],ymm3[6],ymm1[7]
388 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u]
389 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u,u,u,u,u]
390 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6,7]
391 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
392 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 32(%rax)
393 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, (%rax)
394 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
395 ; AVX2-FAST-PERLANE-NEXT: retq
397 ; AVX512F-SLOW-LABEL: store_i16_stride8_vf4:
398 ; AVX512F-SLOW: # %bb.0:
399 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
400 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
401 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
402 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
403 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
404 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
405 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
406 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
407 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
408 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
409 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
410 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
411 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
412 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
413 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
414 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
415 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
416 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[0,1,1,3,4,5,5,7]
417 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,6,5,7,8,9,10,11,12,14,13,15]
418 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
419 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm3[0,1,1,3,4,5,5,7]
420 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,5,7,4,6,8,9,10,11,13,15,12,14]
421 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm4[3],ymm2[4,5],ymm4[6],ymm2[7]
422 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[3,1,2,3,7,5,6,7]
423 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[2,0,3,1,4,5,6,7,10,8,11,9,12,13,14,15]
424 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
425 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm5[3,1,2,3,7,5,6,7]
426 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm6[3,1,2,0,4,5,6,7,11,9,10,8,12,13,14,15]
427 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7]
428 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
429 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,1,2,0,4,5,6,4]
430 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,6,4,7,5,8,9,10,11,14,12,15,13]
431 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,1,2,0,4,5,6,4]
432 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,7,5,6,4,8,9,10,11,15,13,14,12]
433 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2],ymm3[3],ymm1[4,5],ymm3[6],ymm1[7]
434 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
435 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,2,1,3,4,5,6,7,8,10,9,11,12,13,14,15]
436 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[0,2,2,3,4,6,6,7]
437 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[1,3,0,2,4,5,6,7,9,11,8,10,12,13,14,15]
438 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6,7]
439 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
440 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
441 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
442 ; AVX512F-SLOW-NEXT: vzeroupper
443 ; AVX512F-SLOW-NEXT: retq
445 ; AVX512F-FAST-LABEL: store_i16_stride8_vf4:
446 ; AVX512F-FAST: # %bb.0:
447 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
448 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
449 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
450 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
451 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
452 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
453 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
454 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
455 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
456 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
457 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
458 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
459 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
460 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
461 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
462 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
463 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
464 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [1,3,5,7,1,3,5,7]
465 ; AVX512F-FAST-NEXT: # ymm2 = mem[0,1,0,1]
466 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm3
467 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,0,1,4,5,8,9,12,13,u,u,u,u,u,u,u,u,2,3,6,7,10,11,14,15>
468 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
469 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm2
470 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <0,1,4,5,8,9,12,13,u,u,u,u,u,u,u,u,2,3,6,7,10,11,14,15,u,u,u,u,u,u,u,u>
471 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
472 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
473 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,4,6,0,2,4,6]
474 ; AVX512F-FAST-NEXT: # ymm3 = mem[0,1,0,1]
475 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
476 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm1, %ymm1
477 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
478 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
479 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
480 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
481 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
482 ; AVX512F-FAST-NEXT: vzeroupper
483 ; AVX512F-FAST-NEXT: retq
485 ; AVX512BW-LABEL: store_i16_stride8_vf4:
487 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
488 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
489 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
490 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
491 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
492 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
493 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
494 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
495 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
496 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
497 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
498 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
499 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
500 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
501 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
502 ; AVX512BW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
503 ; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
504 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
505 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,4,8,12,16,20,24,28,1,5,9,13,17,21,25,29,2,6,10,14,18,22,26,30,3,7,11,15,19,23,27,31]
506 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
507 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rax)
508 ; AVX512BW-NEXT: vzeroupper
509 ; AVX512BW-NEXT: retq
510 %in.vec0 = load <4 x i16>, ptr %in.vecptr0, align 64
511 %in.vec1 = load <4 x i16>, ptr %in.vecptr1, align 64
512 %in.vec2 = load <4 x i16>, ptr %in.vecptr2, align 64
513 %in.vec3 = load <4 x i16>, ptr %in.vecptr3, align 64
514 %in.vec4 = load <4 x i16>, ptr %in.vecptr4, align 64
515 %in.vec5 = load <4 x i16>, ptr %in.vecptr5, align 64
516 %in.vec6 = load <4 x i16>, ptr %in.vecptr6, align 64
517 %in.vec7 = load <4 x i16>, ptr %in.vecptr7, align 64
518 %1 = shufflevector <4 x i16> %in.vec0, <4 x i16> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
519 %2 = shufflevector <4 x i16> %in.vec2, <4 x i16> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
520 %3 = shufflevector <4 x i16> %in.vec4, <4 x i16> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
521 %4 = shufflevector <4 x i16> %in.vec6, <4 x i16> %in.vec7, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
522 %5 = shufflevector <8 x i16> %1, <8 x i16> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
523 %6 = shufflevector <8 x i16> %3, <8 x i16> %4, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
524 %7 = shufflevector <16 x i16> %5, <16 x i16> %6, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
525 %interleaved.vec = shufflevector <32 x i16> %7, <32 x i16> poison, <32 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 28, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 29, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 30, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27, i32 31>
526 store <32 x i16> %interleaved.vec, ptr %out.vec, align 64
530 define void @store_i16_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
531 ; SSE-LABEL: store_i16_stride8_vf8:
533 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
534 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
535 ; SSE-NEXT: movdqa (%rdi), %xmm0
536 ; SSE-NEXT: movdqa (%rsi), %xmm9
537 ; SSE-NEXT: movdqa (%rdx), %xmm1
538 ; SSE-NEXT: movdqa (%rcx), %xmm11
539 ; SSE-NEXT: movdqa (%r8), %xmm4
540 ; SSE-NEXT: movdqa (%r9), %xmm8
541 ; SSE-NEXT: movdqa (%r10), %xmm3
542 ; SSE-NEXT: movdqa (%rax), %xmm10
543 ; SSE-NEXT: movdqa %xmm1, %xmm13
544 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
545 ; SSE-NEXT: movdqa %xmm0, %xmm12
546 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm9[0],xmm12[1],xmm9[1],xmm12[2],xmm9[2],xmm12[3],xmm9[3]
547 ; SSE-NEXT: movdqa %xmm12, %xmm5
548 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm13[0],xmm5[1],xmm13[1]
549 ; SSE-NEXT: movdqa %xmm3, %xmm14
550 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
551 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm14[0,0,0,0]
552 ; SSE-NEXT: movdqa %xmm4, %xmm15
553 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3]
554 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm15[0,0,0,0]
555 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm6[2],xmm2[3],xmm6[3]
556 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm5[0],xmm2[1]
557 ; SSE-NEXT: movdqa %xmm15, %xmm6
558 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm14[0],xmm6[1],xmm14[1]
559 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm13[1,1,1,1]
560 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm12[1,1,1,1]
561 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1]
562 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm6[2,3]
563 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm13[3,3,3,3]
564 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm12[3,3,3,3]
565 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
566 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[2,2,2,2]
567 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm14[2],xmm15[3],xmm14[3]
568 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,1],xmm15[2,3]
569 ; SSE-NEXT: punpckhdq {{.*#+}} xmm12 = xmm12[2],xmm13[2],xmm12[3],xmm13[3]
570 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm14[2,2,2,2]
571 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm13[2],xmm7[3],xmm13[3]
572 ; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm12[0],xmm7[1]
573 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm11[4],xmm1[5],xmm11[5],xmm1[6],xmm11[6],xmm1[7],xmm11[7]
574 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm9[4],xmm0[5],xmm9[5],xmm0[6],xmm9[6],xmm0[7],xmm9[7]
575 ; SSE-NEXT: movdqa %xmm0, %xmm9
576 ; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1]
577 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm10[4],xmm3[5],xmm10[5],xmm3[6],xmm10[6],xmm3[7],xmm10[7]
578 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm3[0,0,0,0]
579 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm8[4],xmm4[5],xmm8[5],xmm4[6],xmm8[6],xmm4[7],xmm8[7]
580 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm4[0,0,0,0]
581 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm10[2],xmm8[3],xmm10[3]
582 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm9[0],xmm8[1]
583 ; SSE-NEXT: movdqa %xmm4, %xmm9
584 ; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm3[0],xmm9[1],xmm3[1]
585 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm1[1,1,1,1]
586 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm0[1,1,1,1]
587 ; SSE-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm10[0],xmm11[1],xmm10[1]
588 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm9[2,3]
589 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm1[3,3,3,3]
590 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[3,3,3,3]
591 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
592 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm4[2,2,2,2]
593 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
594 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm4[2,3]
595 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
596 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[2,2,2,2]
597 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm1[2],xmm9[3],xmm1[3]
598 ; SSE-NEXT: movsd {{.*#+}} xmm9 = xmm0[0],xmm9[1]
599 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
600 ; SSE-NEXT: movapd %xmm9, 96(%rax)
601 ; SSE-NEXT: movaps %xmm10, 112(%rax)
602 ; SSE-NEXT: movaps %xmm11, 80(%rax)
603 ; SSE-NEXT: movapd %xmm8, 64(%rax)
604 ; SSE-NEXT: movapd %xmm7, 32(%rax)
605 ; SSE-NEXT: movaps %xmm6, 48(%rax)
606 ; SSE-NEXT: movaps %xmm5, 16(%rax)
607 ; SSE-NEXT: movapd %xmm2, (%rax)
610 ; AVX1-ONLY-LABEL: store_i16_stride8_vf8:
611 ; AVX1-ONLY: # %bb.0:
612 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
613 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
614 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
615 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm3
616 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm5
617 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm7
618 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm8
619 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
620 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm2
621 ; AVX1-ONLY-NEXT: vmovdqa (%r11), %xmm4
622 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm6
623 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
624 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
625 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm10[1,1,1,1]
626 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm9[2,3],xmm0[4,5,6,7]
627 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
628 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm11, %ymm0
629 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
630 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
631 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm13 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
632 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm11[0,0,0,0]
633 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm12[0,1,0,1]
634 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm14 = xmm15[0,1,2,3,4,5],xmm14[6,7]
635 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
636 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm13[2,3],ymm0[4,5],ymm13[6,7]
637 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm9[2,3,2,3]
638 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm10[3,3,3,3]
639 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm14[0,1],xmm13[2,3],xmm14[4,5,6,7]
640 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm9 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
641 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm9, %ymm9
642 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm10 = xmm12[2],xmm11[2],xmm12[3],xmm11[3]
643 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,2,2]
644 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2,3,4,5],xmm11[6,7]
645 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
646 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1],ymm10[2,3],ymm9[4,5],ymm10[6,7]
647 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
648 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
649 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
650 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm7[2,3],xmm5[4,5,6,7]
651 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm8 = xmm3[0],xmm7[0],xmm3[1],xmm7[1]
652 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm8, %ymm5
653 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm6[4],xmm4[5],xmm6[5],xmm4[6],xmm6[6],xmm4[7],xmm6[7]
654 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
655 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
656 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,0,0]
657 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm1[0,1,0,1]
658 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm8[0,1,2,3,4,5],xmm6[6,7]
659 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm2
660 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
661 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,3,2,3]
662 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[3,3,3,3]
663 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
664 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm3 = xmm3[2],xmm7[2],xmm3[3],xmm7[3]
665 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
666 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm5 = xmm1[2],xmm4[2],xmm1[3],xmm4[3]
667 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,2]
668 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm4[6,7]
669 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
670 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
671 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
672 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 64(%rax)
673 ; AVX1-ONLY-NEXT: vmovaps %ymm9, 32(%rax)
674 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
675 ; AVX1-ONLY-NEXT: vzeroupper
676 ; AVX1-ONLY-NEXT: retq
678 ; AVX2-ONLY-LABEL: store_i16_stride8_vf8:
679 ; AVX2-ONLY: # %bb.0:
680 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
681 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
682 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
683 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
684 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm1
685 ; AVX2-ONLY-NEXT: vmovdqa (%r8), %xmm2
686 ; AVX2-ONLY-NEXT: vmovdqa (%r11), %xmm3
687 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
688 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
689 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm4
690 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm3
691 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm5 = ymm3[0,2,0,2]
692 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11>
693 ; AVX2-ONLY-NEXT: vpshufb %ymm6, %ymm5, %ymm2
694 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm7 = ymm4[0,2,0,2]
695 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u>
696 ; AVX2-ONLY-NEXT: vpshufb %ymm8, %ymm7, %ymm9
697 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3],ymm9[4,5,6],ymm2[7]
698 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm9 = ymm1[0,2,0,2]
699 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u>
700 ; AVX2-ONLY-NEXT: vpshufb %ymm10, %ymm9, %ymm11
701 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm12 = ymm0[0,2,0,2]
702 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm13 = <0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,u,u,u,u>
703 ; AVX2-ONLY-NEXT: vpshufb %ymm13, %ymm12, %ymm14
704 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm11 = ymm14[0],ymm11[1],ymm14[2,3,4],ymm11[5],ymm14[6,7]
705 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0,1],ymm2[2,3],ymm11[4,5],ymm2[6,7]
706 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15>
707 ; AVX2-ONLY-NEXT: vpshufb %ymm11, %ymm5, %ymm5
708 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u>
709 ; AVX2-ONLY-NEXT: vpshufb %ymm14, %ymm7, %ymm7
710 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3],ymm7[4,5,6],ymm5[7]
711 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u>
712 ; AVX2-ONLY-NEXT: vpshufb %ymm7, %ymm9, %ymm9
713 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm15 = <4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,u,u,u,u>
714 ; AVX2-ONLY-NEXT: vpshufb %ymm15, %ymm12, %ymm12
715 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm9 = ymm12[0],ymm9[1],ymm12[2,3,4],ymm9[5],ymm12[6,7]
716 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0,1],ymm5[2,3],ymm9[4,5],ymm5[6,7]
717 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,3,1,3]
718 ; AVX2-ONLY-NEXT: vpshufb %ymm6, %ymm3, %ymm6
719 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm4[1,3,1,3]
720 ; AVX2-ONLY-NEXT: vpshufb %ymm8, %ymm4, %ymm8
721 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
722 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
723 ; AVX2-ONLY-NEXT: vpshufb %ymm10, %ymm1, %ymm8
724 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
725 ; AVX2-ONLY-NEXT: vpshufb %ymm13, %ymm0, %ymm9
726 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
727 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1],ymm6[2,3],ymm8[4,5],ymm6[6,7]
728 ; AVX2-ONLY-NEXT: vpshufb %ymm11, %ymm3, %ymm3
729 ; AVX2-ONLY-NEXT: vpshufb %ymm14, %ymm4, %ymm4
730 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
731 ; AVX2-ONLY-NEXT: vpshufb %ymm7, %ymm1, %ymm1
732 ; AVX2-ONLY-NEXT: vpshufb %ymm15, %ymm0, %ymm0
733 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
734 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2,3],ymm0[4,5],ymm3[6,7]
735 ; AVX2-ONLY-NEXT: vmovdqa %ymm0, 96(%rax)
736 ; AVX2-ONLY-NEXT: vmovdqa %ymm6, 64(%rax)
737 ; AVX2-ONLY-NEXT: vmovdqa %ymm5, 32(%rax)
738 ; AVX2-ONLY-NEXT: vmovdqa %ymm2, (%rax)
739 ; AVX2-ONLY-NEXT: vzeroupper
740 ; AVX2-ONLY-NEXT: retq
742 ; AVX512F-LABEL: store_i16_stride8_vf8:
744 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
745 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
746 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
747 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
748 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm1
749 ; AVX512F-NEXT: vmovdqa (%r8), %xmm2
750 ; AVX512F-NEXT: vmovdqa (%r11), %xmm3
751 ; AVX512F-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
752 ; AVX512F-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
753 ; AVX512F-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
754 ; AVX512F-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm3
755 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm3[0,2,0,2]
756 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15>
757 ; AVX512F-NEXT: vpshufb %ymm5, %ymm4, %ymm6
758 ; AVX512F-NEXT: vpermq {{.*#+}} ymm7 = ymm2[0,2,0,2]
759 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u>
760 ; AVX512F-NEXT: vpshufb %ymm8, %ymm7, %ymm9
761 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0,1,2],ymm6[3],ymm9[4,5,6],ymm6[7]
762 ; AVX512F-NEXT: vpermq {{.*#+}} ymm9 = ymm1[0,2,0,2]
763 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u>
764 ; AVX512F-NEXT: vpshufb %ymm10, %ymm9, %ymm11
765 ; AVX512F-NEXT: vpermq {{.*#+}} ymm12 = ymm0[0,2,0,2]
766 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm13 = <4,5,12,13,u,u,u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,u,u,u,u>
767 ; AVX512F-NEXT: vpshufb %ymm13, %ymm12, %ymm14
768 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm11 = ymm14[0],ymm11[1],ymm14[2,3,4],ymm11[5],ymm14[6,7]
769 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm6 = ymm11[0,1],ymm6[2,3],ymm11[4,5],ymm6[6,7]
770 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11>
771 ; AVX512F-NEXT: vpshufb %ymm11, %ymm4, %ymm4
772 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u>
773 ; AVX512F-NEXT: vpshufb %ymm14, %ymm7, %ymm7
774 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3],ymm7[4,5,6],ymm4[7]
775 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u>
776 ; AVX512F-NEXT: vpshufb %ymm7, %ymm9, %ymm9
777 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm15 = <0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,u,u,u,u>
778 ; AVX512F-NEXT: vpshufb %ymm15, %ymm12, %ymm12
779 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm9 = ymm12[0],ymm9[1],ymm12[2,3,4],ymm9[5],ymm12[6,7]
780 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1],ymm4[2,3],ymm9[4,5],ymm4[6,7]
781 ; AVX512F-NEXT: vinserti64x4 $1, %ymm6, %zmm4, %zmm4
782 ; AVX512F-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,3,1,3]
783 ; AVX512F-NEXT: vpshufb %ymm5, %ymm3, %ymm5
784 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,3,1,3]
785 ; AVX512F-NEXT: vpshufb %ymm8, %ymm2, %ymm6
786 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3],ymm6[4,5,6],ymm5[7]
787 ; AVX512F-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
788 ; AVX512F-NEXT: vpshufb %ymm10, %ymm1, %ymm6
789 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
790 ; AVX512F-NEXT: vpshufb %ymm13, %ymm0, %ymm8
791 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2,3,4],ymm6[5],ymm8[6,7]
792 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1],ymm5[2,3],ymm6[4,5],ymm5[6,7]
793 ; AVX512F-NEXT: vpshufb %ymm11, %ymm3, %ymm3
794 ; AVX512F-NEXT: vpshufb %ymm14, %ymm2, %ymm2
795 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7]
796 ; AVX512F-NEXT: vpshufb %ymm7, %ymm1, %ymm1
797 ; AVX512F-NEXT: vpshufb %ymm15, %ymm0, %ymm0
798 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
799 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
800 ; AVX512F-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm0
801 ; AVX512F-NEXT: vmovdqa64 %zmm0, 64(%rax)
802 ; AVX512F-NEXT: vmovdqa64 %zmm4, (%rax)
803 ; AVX512F-NEXT: vzeroupper
806 ; AVX512BW-LABEL: store_i16_stride8_vf8:
808 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
809 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
810 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
811 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
812 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
813 ; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
814 ; AVX512BW-NEXT: vmovdqa (%r11), %xmm3
815 ; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
816 ; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
817 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
818 ; AVX512BW-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm1
819 ; AVX512BW-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
820 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
821 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,8,16,24,32,40,48,56,1,9,17,25,33,41,49,57,2,10,18,26,34,42,50,58,3,11,19,27,35,43,51,59]
822 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
823 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [4,12,20,28,36,44,52,60,5,13,21,29,37,45,53,61,6,14,22,30,38,46,54,62,7,15,23,31,39,47,55,63]
824 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
825 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 64(%rax)
826 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%rax)
827 ; AVX512BW-NEXT: vzeroupper
828 ; AVX512BW-NEXT: retq
829 %in.vec0 = load <8 x i16>, ptr %in.vecptr0, align 64
830 %in.vec1 = load <8 x i16>, ptr %in.vecptr1, align 64
831 %in.vec2 = load <8 x i16>, ptr %in.vecptr2, align 64
832 %in.vec3 = load <8 x i16>, ptr %in.vecptr3, align 64
833 %in.vec4 = load <8 x i16>, ptr %in.vecptr4, align 64
834 %in.vec5 = load <8 x i16>, ptr %in.vecptr5, align 64
835 %in.vec6 = load <8 x i16>, ptr %in.vecptr6, align 64
836 %in.vec7 = load <8 x i16>, ptr %in.vecptr7, align 64
837 %1 = shufflevector <8 x i16> %in.vec0, <8 x i16> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
838 %2 = shufflevector <8 x i16> %in.vec2, <8 x i16> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
839 %3 = shufflevector <8 x i16> %in.vec4, <8 x i16> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
840 %4 = shufflevector <8 x i16> %in.vec6, <8 x i16> %in.vec7, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
841 %5 = shufflevector <16 x i16> %1, <16 x i16> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
842 %6 = shufflevector <16 x i16> %3, <16 x i16> %4, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
843 %7 = shufflevector <32 x i16> %5, <32 x i16> %6, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
844 %interleaved.vec = shufflevector <64 x i16> %7, <64 x i16> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63>
845 store <64 x i16> %interleaved.vec, ptr %out.vec, align 64
849 define void @store_i16_stride8_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
850 ; SSE-LABEL: store_i16_stride8_vf16:
852 ; SSE-NEXT: subq $88, %rsp
853 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
854 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
855 ; SSE-NEXT: movdqa (%rdi), %xmm10
856 ; SSE-NEXT: movdqa 16(%rdi), %xmm15
857 ; SSE-NEXT: movdqa (%rsi), %xmm0
858 ; SSE-NEXT: movdqa (%rdx), %xmm7
859 ; SSE-NEXT: movdqa (%rcx), %xmm1
860 ; SSE-NEXT: movdqa (%r8), %xmm8
861 ; SSE-NEXT: movdqa (%r9), %xmm2
862 ; SSE-NEXT: movdqa (%r10), %xmm11
863 ; SSE-NEXT: movdqa (%rax), %xmm3
864 ; SSE-NEXT: movdqa %xmm11, %xmm5
865 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
866 ; SSE-NEXT: movdqa %xmm8, %xmm12
867 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm2[0],xmm12[1],xmm2[1],xmm12[2],xmm2[2],xmm12[3],xmm2[3]
868 ; SSE-NEXT: movdqa %xmm12, %xmm4
869 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
870 ; SSE-NEXT: movdqa %xmm5, %xmm14
871 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
872 ; SSE-NEXT: movdqa %xmm7, %xmm6
873 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
874 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,1,1]
875 ; SSE-NEXT: movdqa %xmm6, %xmm9
876 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
877 ; SSE-NEXT: movdqa %xmm10, %xmm13
878 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
879 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm13[1,1,1,1]
880 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
881 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,1],xmm4[2,3]
882 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
883 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm14[0,0,0,0]
884 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm12[0,0,0,0]
885 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
886 ; SSE-NEXT: movdqa %xmm13, %xmm4
887 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm9[0],xmm4[1],xmm9[1]
888 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
889 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
890 ; SSE-NEXT: movdqa 16(%rdx), %xmm9
891 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm3[4],xmm11[5],xmm3[5],xmm11[6],xmm3[6],xmm11[7],xmm3[7]
892 ; SSE-NEXT: movdqa 16(%rcx), %xmm3
893 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
894 ; SSE-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm2[4],xmm8[5],xmm2[5],xmm8[6],xmm2[6],xmm8[7],xmm2[7]
895 ; SSE-NEXT: movdqa %xmm8, %xmm2
896 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm11[0],xmm2[1],xmm11[1]
897 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
898 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
899 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[1,1,1,1]
900 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
901 ; SSE-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm0[4],xmm10[5],xmm0[5],xmm10[6],xmm0[6],xmm10[7],xmm0[7]
902 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm10[1,1,1,1]
903 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
904 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,3]
905 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
906 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[0,0,0,0]
907 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[0,0,0,0]
908 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
909 ; SSE-NEXT: movdqa %xmm10, %xmm0
910 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
911 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
912 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
913 ; SSE-NEXT: movdqa %xmm9, %xmm5
914 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
915 ; SSE-NEXT: movdqa 16(%rsi), %xmm0
916 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
917 ; SSE-NEXT: movdqa %xmm15, %xmm6
918 ; SSE-NEXT: movdqa %xmm15, %xmm3
919 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
920 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,1,1]
921 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
922 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,1,1]
923 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
924 ; SSE-NEXT: movdqa 16(%r10), %xmm15
925 ; SSE-NEXT: movdqa 16(%rax), %xmm7
926 ; SSE-NEXT: movdqa %xmm15, %xmm14
927 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm7[0],xmm14[1],xmm7[1],xmm14[2],xmm7[2],xmm14[3],xmm7[3]
928 ; SSE-NEXT: movdqa 16(%r8), %xmm4
929 ; SSE-NEXT: movdqa 16(%r9), %xmm11
930 ; SSE-NEXT: movdqa %xmm4, %xmm2
931 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm11[0],xmm2[1],xmm11[1],xmm2[2],xmm11[2],xmm2[3],xmm11[3]
932 ; SSE-NEXT: movdqa %xmm2, %xmm0
933 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1]
934 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm0[2,3]
935 ; SSE-NEXT: movaps %xmm1, (%rsp) # 16-byte Spill
936 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,0,0,0]
937 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,0,0,0]
938 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
939 ; SSE-NEXT: movdqa %xmm3, %xmm0
940 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1]
941 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
942 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
943 ; SSE-NEXT: punpckhwd {{.*#+}} xmm15 = xmm15[4],xmm7[4],xmm15[5],xmm7[5],xmm15[6],xmm7[6],xmm15[7],xmm7[7]
944 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm11[4],xmm4[5],xmm11[5],xmm4[6],xmm11[6],xmm4[7],xmm11[7]
945 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
946 ; SSE-NEXT: # xmm9 = xmm9[4],mem[4],xmm9[5],mem[5],xmm9[6],mem[6],xmm9[7],mem[7]
947 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
948 ; SSE-NEXT: # xmm6 = xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
949 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[1,1,1,1]
950 ; SSE-NEXT: movdqa %xmm9, %xmm1
951 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
952 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm6[1,1,1,1]
953 ; SSE-NEXT: movdqa %xmm6, %xmm7
954 ; SSE-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm0[0],xmm11[1],xmm0[1]
955 ; SSE-NEXT: movdqa %xmm4, %xmm0
956 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
957 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm0[2,3]
958 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[0,0,0,0]
959 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm4[0,0,0,0]
960 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm0[2],xmm9[3],xmm0[3]
961 ; SSE-NEXT: movdqa %xmm6, %xmm0
962 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
963 ; SSE-NEXT: movsd {{.*#+}} xmm9 = xmm0[0],xmm9[1]
964 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
965 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[3,3,3,3]
966 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm13[3,3,3,3]
967 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1]
968 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm12[2,2,2,2]
969 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
970 ; SSE-NEXT: punpckhdq {{.*#+}} xmm12 = xmm12[2],xmm1[2],xmm12[3],xmm1[3]
971 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,1],xmm12[2,3]
972 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm0[2],xmm13[3],xmm0[3]
973 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm1[2,2,2,2]
974 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm12[2],xmm5[3],xmm12[3]
975 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm13[0],xmm5[1]
976 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
977 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm0[3,3,3,3]
978 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[3,3,3,3]
979 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1]
980 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm8[2,2,2,2]
981 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
982 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm1[2],xmm8[3],xmm1[3]
983 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm8[2,3]
984 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm0[2],xmm10[3],xmm0[3]
985 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm1[2,2,2,2]
986 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm8[2],xmm13[3],xmm8[3]
987 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm10[0],xmm13[1]
988 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
989 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[3,3,3,3]
990 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm3[3,3,3,3]
991 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1]
992 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm2[2,2,2,2]
993 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm14[2],xmm2[3],xmm14[3]
994 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm2[2,3]
995 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
996 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[2,2,2,2]
997 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
998 ; SSE-NEXT: movsd {{.*#+}} xmm10 = xmm3[0],xmm10[1]
999 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1000 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[3,3,3,3]
1001 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[3,3,3,3]
1002 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
1003 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[2,2,2,2]
1004 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm15[2],xmm4[3],xmm15[3]
1005 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3]
1006 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm0[2],xmm7[3],xmm0[3]
1007 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm15[2,2,2,2]
1008 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1009 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm7[0],xmm1[1]
1010 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1011 ; SSE-NEXT: movapd %xmm1, 224(%rax)
1012 ; SSE-NEXT: movaps %xmm3, 240(%rax)
1013 ; SSE-NEXT: movapd %xmm10, 160(%rax)
1014 ; SSE-NEXT: movaps %xmm8, 176(%rax)
1015 ; SSE-NEXT: movapd %xmm13, 96(%rax)
1016 ; SSE-NEXT: movaps %xmm12, 112(%rax)
1017 ; SSE-NEXT: movapd %xmm5, 32(%rax)
1018 ; SSE-NEXT: movaps %xmm6, 48(%rax)
1019 ; SSE-NEXT: movapd %xmm9, 192(%rax)
1020 ; SSE-NEXT: movaps %xmm11, 208(%rax)
1021 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1022 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1023 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1024 ; SSE-NEXT: movaps %xmm0, 144(%rax)
1025 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1026 ; SSE-NEXT: movaps %xmm0, 64(%rax)
1027 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1028 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1029 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1030 ; SSE-NEXT: movaps %xmm0, (%rax)
1031 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1032 ; SSE-NEXT: movaps %xmm0, 16(%rax)
1033 ; SSE-NEXT: addq $88, %rsp
1036 ; AVX1-ONLY-LABEL: store_i16_stride8_vf16:
1037 ; AVX1-ONLY: # %bb.0:
1038 ; AVX1-ONLY-NEXT: subq $136, %rsp
1039 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1040 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1041 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm3
1042 ; AVX1-ONLY-NEXT: vmovaps 16(%r10), %xmm0
1043 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1044 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm5
1045 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
1046 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
1047 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,0,1]
1048 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1049 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm2
1050 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm6
1051 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1052 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm12[0,1,0,1]
1053 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm12[0],zero,xmm12[1],zero
1054 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
1055 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm7[0,1,2],ymm1[3],ymm7[4,5,6],ymm1[7]
1056 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm4
1057 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm8
1058 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm8[0],xmm4[0],xmm8[1],xmm4[1],xmm8[2],xmm4[2],xmm8[3],xmm4[3]
1059 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm14[1,1,1,1]
1060 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm14, %ymm9
1061 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm10
1062 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm11
1063 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1064 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm15[0,0,1,1]
1065 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm13, %ymm13
1066 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm9[0],ymm13[1],ymm9[2,3,4],ymm13[5],ymm9[6,7]
1067 ; AVX1-ONLY-NEXT: vmovaps 16(%rax), %xmm7
1068 ; AVX1-ONLY-NEXT: vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1069 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm13[0,1],ymm1[2,3],ymm13[4,5],ymm1[6,7]
1070 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1071 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm12[2,2,3,3]
1072 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm12, %ymm1
1073 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[2,2,2,2]
1074 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm0
1075 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm7
1076 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
1077 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm15[2,2,3,3]
1078 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm15[2,3,2,3]
1079 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm12
1080 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm14[2,3,2,3]
1081 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[3,3,3,3]
1082 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm0, %ymm14
1083 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm13
1084 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
1085 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm14
1086 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0,1],ymm1[2,3],ymm12[4,5],ymm1[6,7]
1087 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1088 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm2[4],xmm6[5],xmm2[5],xmm6[6],xmm2[6],xmm6[7],xmm2[7]
1089 ; AVX1-ONLY-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
1090 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[2,2,3,3]
1091 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm12
1092 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
1093 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1094 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[2,2,2,2]
1095 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm3
1096 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm12[0,1,2],ymm3[3],ymm12[4,5,6],ymm3[7]
1097 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1098 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1099 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm0[2,2,3,3]
1100 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[2,3,2,3]
1101 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
1102 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm4[4],xmm8[5],xmm4[5],xmm8[6],xmm4[6],xmm8[7],xmm4[7]
1103 ; AVX1-ONLY-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1104 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[2,3,2,3]
1105 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[3,3,3,3]
1106 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm8, %ymm8
1107 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm8[0],ymm10[1],ymm8[2,3,4],ymm10[5],ymm8[6,7]
1108 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1],ymm5[2,3],ymm8[4,5],ymm5[6,7]
1109 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1110 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm13[0],xmm7[0],xmm13[1],xmm7[1],xmm13[2],xmm7[2],xmm13[3],xmm7[3]
1111 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm9
1112 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[2,2,3,3]
1113 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
1114 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1115 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1116 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1117 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm12[2,2,2,2]
1118 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm11, %ymm11
1119 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0,1,2],ymm11[3],ymm8[4,5,6],ymm11[7]
1120 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm8
1121 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
1122 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[2,2,3,3]
1123 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[2,3,2,3]
1124 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm15, %ymm4
1125 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm6
1126 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
1127 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1128 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm15[2,3,2,3]
1129 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm15[3,3,3,3]
1130 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm0
1131 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
1132 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm7[2,3],ymm0[4,5],ymm7[6,7]
1133 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1134 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm13[4],xmm9[4],xmm13[5],xmm9[5],xmm13[6],xmm9[6],xmm13[7],xmm9[7]
1135 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
1136 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm0[2,2,3,3]
1137 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
1138 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[2,2,2,2]
1139 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm7, %ymm7
1140 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5,6],ymm7[7]
1141 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
1142 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[2,2,3,3]
1143 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[2,3,2,3]
1144 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm4, %ymm4
1145 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
1146 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[2,3,2,3]
1147 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm1[3,3,3,3]
1148 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
1149 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7]
1150 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
1151 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,0,0]
1152 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
1153 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
1154 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,1,0,1]
1155 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
1156 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
1157 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3],ymm0[4,5,6],ymm2[7]
1158 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[1,1,1,1]
1159 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1160 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,0,1,1]
1161 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm2, %ymm2
1162 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
1163 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
1164 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm12[0,0,0,0]
1165 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm12[0,1,0,1]
1166 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
1167 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[0,1,0,1]
1168 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm10[0],zero,xmm10[1],zero
1169 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
1170 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
1171 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[1,1,1,1]
1172 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm15, %ymm2
1173 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[0,0,1,1]
1174 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm3, %ymm3
1175 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2,3,4],ymm3[5],ymm2[6,7]
1176 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
1177 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1178 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm3[0,0,0,0]
1179 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm3[0,1,0,1]
1180 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
1181 ; AVX1-ONLY-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
1182 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,1,0,1]
1183 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
1184 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
1185 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
1186 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
1187 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm5[1,1,1,1]
1188 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
1189 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1190 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm5 = xmm6[0,0,1,1]
1191 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1192 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0],ymm5[1],ymm3[2,3,4],ymm5[5],ymm3[6,7]
1193 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
1194 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1195 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 64(%rax)
1196 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
1197 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
1198 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 224(%rax)
1199 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1200 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
1201 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1202 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
1203 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1204 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
1205 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1206 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
1207 ; AVX1-ONLY-NEXT: addq $136, %rsp
1208 ; AVX1-ONLY-NEXT: vzeroupper
1209 ; AVX1-ONLY-NEXT: retq
1211 ; AVX2-SLOW-LABEL: store_i16_stride8_vf16:
1212 ; AVX2-SLOW: # %bb.0:
1213 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1214 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1215 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm1
1216 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm5
1217 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm7
1218 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1219 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm2[0,0,1,1]
1220 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
1221 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm10
1222 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm11
1223 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1224 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm6[0],zero,xmm6[1],zero
1225 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
1226 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7]
1227 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm12
1228 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm13
1229 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
1230 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[0,0,1,1]
1231 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm0[0,1,1,3]
1232 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm15
1233 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
1234 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
1235 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm9[0],zero,xmm9[1],zero
1236 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
1237 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
1238 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm3
1239 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1],ymm4[2,3],ymm14[4,5],ymm4[6,7]
1240 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1241 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm4
1242 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1243 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
1244 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1245 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
1246 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
1247 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm6
1248 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1249 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,1,3]
1250 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1251 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
1252 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
1253 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm8
1254 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1],ymm2[2,3],ymm9[4,5],ymm2[6,7]
1255 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1256 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %ymm9
1257 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1258 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,2,3,3]
1259 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
1260 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1261 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[2,2,3,3]
1262 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
1263 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm11[0,1,2],ymm5[3],ymm11[4,5,6],ymm5[7]
1264 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm14
1265 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
1266 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[2,2,3,3]
1267 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,1,3]
1268 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
1269 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm0[2,2,3,3]
1270 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,1,3]
1271 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0],ymm12[1],ymm13[2,3,4],ymm12[5],ymm13[6,7]
1272 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm12[0,1],ymm5[2,3],ymm12[4,5],ymm5[6,7]
1273 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1274 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,0,1,1]
1275 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
1276 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero
1277 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
1278 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm10[0,1,2],ymm7[3],ymm10[4,5,6],ymm7[7]
1279 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[0,0,1,1]
1280 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
1281 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
1282 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
1283 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm10[1],ymm0[2,3,4],ymm10[5],ymm0[6,7]
1284 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm0[0,1],ymm7[2,3],ymm0[4,5],ymm7[6,7]
1285 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm9[0],ymm14[0],ymm9[1],ymm14[1],ymm9[2],ymm14[2],ymm9[3],ymm14[3],ymm9[8],ymm14[8],ymm9[9],ymm14[9],ymm9[10],ymm14[10],ymm9[11],ymm14[11]
1286 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm10[0,2,2,3,4,6,6,7]
1287 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
1288 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm6[0],ymm8[0],ymm6[1],ymm8[1],ymm6[2],ymm8[2],ymm6[3],ymm8[3],ymm6[8],ymm8[8],ymm6[9],ymm8[9],ymm6[10],ymm8[10],ymm6[11],ymm8[11]
1289 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm12[2,1,3,3,6,5,7,7]
1290 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
1291 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2],ymm0[3],ymm11[4,5,6],ymm0[7]
1292 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
1293 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm13[0,2,2,3,4,6,6,7]
1294 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm0[2,1,3,3]
1295 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm0
1296 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
1297 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[2,1,3,3,6,5,7,7]
1298 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
1299 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm15[1],ymm2[2,3,4],ymm15[5],ymm2[6,7]
1300 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm2[0,1],ymm11[2,3],ymm2[4,5],ymm11[6,7]
1301 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm9[4],ymm14[4],ymm9[5],ymm14[5],ymm9[6],ymm14[6],ymm9[7],ymm14[7],ymm9[12],ymm14[12],ymm9[13],ymm14[13],ymm9[14],ymm14[14],ymm9[15],ymm14[15]
1302 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm6[4],ymm8[4],ymm6[5],ymm8[5],ymm6[6],ymm8[6],ymm6[7],ymm8[7],ymm6[12],ymm8[12],ymm6[13],ymm8[13],ymm6[14],ymm8[14],ymm6[15],ymm8[15]
1303 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm2[0,2,2,3,4,6,6,7]
1304 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
1305 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm6[2,1,3,3,6,5,7,7]
1306 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
1307 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
1308 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
1309 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm3[0,2,2,3,4,6,6,7]
1310 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
1311 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
1312 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm0[2,1,3,3,6,5,7,7]
1313 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,3]
1314 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0],ymm4[1],ymm9[2,3,4],ymm4[5],ymm9[6,7]
1315 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm8[2,3],ymm4[4,5],ymm8[6,7]
1316 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
1317 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
1318 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
1319 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
1320 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
1321 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
1322 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
1323 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,1,1,3,4,5,5,7]
1324 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
1325 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
1326 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
1327 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[0,0,2,1,4,4,6,5]
1328 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
1329 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm12[0,1,1,3,4,5,5,7]
1330 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
1331 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
1332 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm13[0,0,2,1,4,4,6,5]
1333 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
1334 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm5[0,1,1,3,4,5,5,7]
1335 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
1336 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3,4],ymm3[5],ymm1[6,7]
1337 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
1338 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1339 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 128(%rax)
1340 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 192(%rax)
1341 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, 224(%rax)
1342 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, 160(%rax)
1343 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 64(%rax)
1344 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1345 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
1346 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1347 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
1348 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1349 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
1350 ; AVX2-SLOW-NEXT: vzeroupper
1351 ; AVX2-SLOW-NEXT: retq
1353 ; AVX2-FAST-LABEL: store_i16_stride8_vf16:
1354 ; AVX2-FAST: # %bb.0:
1355 ; AVX2-FAST-NEXT: pushq %rax
1356 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1357 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1358 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm5
1359 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm4
1360 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
1361 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,0,0,u,u,1,1>
1362 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm0
1363 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm6
1364 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm9
1365 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3]
1366 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <0,u,0,u,u,u,1,u>
1367 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm2, %ymm7
1368 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0,1,2],ymm0[3],ymm7[4,5,6],ymm0[7]
1369 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm10
1370 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm12
1371 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3]
1372 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,1,1,1,1,u,u>
1373 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm0, %ymm14
1374 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm15
1375 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm2
1376 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm15[0],xmm2[1],xmm15[1],xmm2[2],xmm15[2],xmm2[3],xmm15[3]
1377 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <0,u,1,u,1,u,u,u>
1378 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm7
1379 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm14[1],ymm7[2,3,4],ymm14[5],ymm7[6,7]
1380 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0,1],ymm3[2,3],ymm7[4,5],ymm3[6,7]
1381 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1382 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <2,2,2,2,u,u,3,3>
1383 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm7, %ymm1
1384 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm7, %ymm3
1385 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
1386 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <2,2,3,3,3,3,u,u>
1387 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm11, %ymm3
1388 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm11, %ymm0
1389 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
1390 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
1391 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1392 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
1393 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm14
1394 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
1395 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm7, %ymm5
1396 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm7, %ymm6
1397 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3],ymm6[4,5,6],ymm5[7]
1398 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm6
1399 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm12[4],xmm10[4],xmm12[5],xmm10[5],xmm12[6],xmm10[6],xmm12[7],xmm10[7]
1400 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm9
1401 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm2[4],xmm15[4],xmm2[5],xmm15[5],xmm2[6],xmm15[6],xmm2[7],xmm15[7]
1402 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm11, %ymm2
1403 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm11, %ymm11
1404 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0],ymm2[1],ymm11[2,3,4],ymm2[5],ymm11[6,7]
1405 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm12
1406 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm5[2,3],ymm2[4,5],ymm5[6,7]
1407 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1408 ; AVX2-FAST-NEXT: vmovdqa (%r10), %ymm13
1409 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,0,0,0,u,u,1,1>
1410 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
1411 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm15
1412 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,u,0,u,u,u,1,u>
1413 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm4
1414 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
1415 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,0,1,1,1,1,u,u>
1416 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm1, %ymm4
1417 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm8, %ymm5
1418 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
1419 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1],ymm0[2,3],ymm4[4,5],ymm0[6,7]
1420 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1421 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm13[0],ymm15[0],ymm13[1],ymm15[1],ymm13[2],ymm15[2],ymm13[3],ymm15[3],ymm13[8],ymm15[8],ymm13[9],ymm15[9],ymm13[10],ymm15[10],ymm13[11],ymm15[11]
1422 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,2,4,6,4,6,6,7]
1423 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm0, %ymm8
1424 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm9[0],ymm12[0],ymm9[1],ymm12[1],ymm9[2],ymm12[2],ymm9[3],ymm12[3],ymm9[8],ymm12[8],ymm9[9],ymm12[9],ymm9[10],ymm12[10],ymm9[11],ymm12[11]
1425 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [2,1,6,5,6,5,7,7]
1426 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm0, %ymm10
1427 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2],ymm8[3],ymm10[4,5,6],ymm8[7]
1428 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm1
1429 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm0
1430 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
1431 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [4,6,2,3,6,7,6,7]
1432 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm2, %ymm2
1433 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm11 = ymm14[0],ymm6[0],ymm14[1],ymm6[1],ymm14[2],ymm6[2],ymm14[3],ymm6[3],ymm14[8],ymm6[8],ymm14[9],ymm6[9],ymm14[10],ymm6[10],ymm14[11],ymm6[11]
1434 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [6,5,3,3,7,7,7,7]
1435 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm3, %ymm4
1436 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3,4],ymm2[5],ymm4[6,7]
1437 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm2[0,1],ymm8[2,3],ymm2[4,5],ymm8[6,7]
1438 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm13[4],ymm15[4],ymm13[5],ymm15[5],ymm13[6],ymm15[6],ymm13[7],ymm15[7],ymm13[12],ymm15[12],ymm13[13],ymm15[13],ymm13[14],ymm15[14],ymm13[15],ymm15[15]
1439 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm9[4],ymm12[4],ymm9[5],ymm12[5],ymm9[6],ymm12[6],ymm9[7],ymm12[7],ymm9[12],ymm12[12],ymm9[13],ymm12[13],ymm9[14],ymm12[14],ymm9[15],ymm12[15]
1440 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,2,4,6,4,6,6,7]
1441 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm9, %ymm9
1442 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [2,1,6,5,6,5,7,7]
1443 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm12
1444 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm12[0,1,2],ymm9[3],ymm12[4,5,6],ymm9[7]
1445 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
1446 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm14[4],ymm6[4],ymm14[5],ymm6[5],ymm14[6],ymm6[6],ymm14[7],ymm6[7],ymm14[12],ymm6[12],ymm14[13],ymm6[13],ymm14[14],ymm6[14],ymm14[15],ymm6[15]
1447 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [4,6,2,3,6,7,6,7]
1448 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm6, %ymm6
1449 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
1450 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm6[1],ymm3[2,3,4],ymm6[5],ymm3[6,7]
1451 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm9[2,3],ymm3[4,5],ymm9[6,7]
1452 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,0,4,4,4,4,6,5]
1453 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm6, %ymm2
1454 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,4,5,4,5,5,7]
1455 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm9, %ymm4
1456 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
1457 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [4,4,2,1,6,5,6,5]
1458 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm4, %ymm0
1459 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [4,5,1,3,5,7,5,7]
1460 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm12, %ymm1
1461 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3,4],ymm0[5],ymm1[6,7]
1462 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
1463 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm6, %ymm1
1464 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm9, %ymm2
1465 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
1466 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm4, %ymm2
1467 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm12, %ymm4
1468 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3,4],ymm2[5],ymm4[6,7]
1469 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
1470 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1471 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 128(%rax)
1472 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 192(%rax)
1473 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 224(%rax)
1474 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 160(%rax)
1475 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1476 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
1477 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1478 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%rax)
1479 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1480 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
1481 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1482 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
1483 ; AVX2-FAST-NEXT: popq %rax
1484 ; AVX2-FAST-NEXT: vzeroupper
1485 ; AVX2-FAST-NEXT: retq
1487 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride8_vf16:
1488 ; AVX2-FAST-PERLANE: # %bb.0:
1489 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1490 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1491 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm1
1492 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm5
1493 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm7
1494 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1495 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm2[0,0,1,1]
1496 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
1497 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm10
1498 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm11
1499 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1500 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm6[0],zero,xmm6[1],zero
1501 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
1502 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7]
1503 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm12
1504 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm13
1505 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
1506 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[0,0,1,1]
1507 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm0[0,1,1,3]
1508 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm15
1509 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
1510 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
1511 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm9[0],zero,xmm9[1],zero
1512 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
1513 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
1514 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm3
1515 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1],ymm4[2,3],ymm14[4,5],ymm4[6,7]
1516 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1517 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm4
1518 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1519 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
1520 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1521 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
1522 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
1523 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm6
1524 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1525 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,1,3]
1526 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1527 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
1528 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
1529 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm8
1530 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1],ymm2[2,3],ymm9[4,5],ymm2[6,7]
1531 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1532 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %ymm9
1533 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1534 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,2,3,3]
1535 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
1536 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1537 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[2,2,3,3]
1538 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
1539 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm11[0,1,2],ymm5[3],ymm11[4,5,6],ymm5[7]
1540 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm14
1541 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
1542 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[2,2,3,3]
1543 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,1,3]
1544 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
1545 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm0[2,2,3,3]
1546 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,1,3]
1547 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0],ymm12[1],ymm13[2,3,4],ymm12[5],ymm13[6,7]
1548 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm12[0,1],ymm5[2,3],ymm12[4,5],ymm5[6,7]
1549 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1550 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,0,1,1]
1551 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
1552 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero
1553 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
1554 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm10[0,1,2],ymm7[3],ymm10[4,5,6],ymm7[7]
1555 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[0,0,1,1]
1556 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
1557 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
1558 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
1559 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm10[1],ymm0[2,3,4],ymm10[5],ymm0[6,7]
1560 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm0[0,1],ymm7[2,3],ymm0[4,5],ymm7[6,7]
1561 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm9[0],ymm14[0],ymm9[1],ymm14[1],ymm9[2],ymm14[2],ymm9[3],ymm14[3],ymm9[8],ymm14[8],ymm9[9],ymm14[9],ymm9[10],ymm14[10],ymm9[11],ymm14[11]
1562 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm10[0,2,2,3,4,6,6,7]
1563 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
1564 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm6[0],ymm8[0],ymm6[1],ymm8[1],ymm6[2],ymm8[2],ymm6[3],ymm8[3],ymm6[8],ymm8[8],ymm6[9],ymm8[9],ymm6[10],ymm8[10],ymm6[11],ymm8[11]
1565 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm12[2,1,3,3,6,5,7,7]
1566 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
1567 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2],ymm0[3],ymm11[4,5,6],ymm0[7]
1568 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
1569 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm13[0,2,2,3,4,6,6,7]
1570 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm0[2,1,3,3]
1571 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm0
1572 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
1573 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[2,1,3,3,6,5,7,7]
1574 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
1575 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm15[1],ymm2[2,3,4],ymm15[5],ymm2[6,7]
1576 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm2[0,1],ymm11[2,3],ymm2[4,5],ymm11[6,7]
1577 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm9[4],ymm14[4],ymm9[5],ymm14[5],ymm9[6],ymm14[6],ymm9[7],ymm14[7],ymm9[12],ymm14[12],ymm9[13],ymm14[13],ymm9[14],ymm14[14],ymm9[15],ymm14[15]
1578 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm6[4],ymm8[4],ymm6[5],ymm8[5],ymm6[6],ymm8[6],ymm6[7],ymm8[7],ymm6[12],ymm8[12],ymm6[13],ymm8[13],ymm6[14],ymm8[14],ymm6[15],ymm8[15]
1579 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm2[0,2,2,3,4,6,6,7]
1580 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
1581 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm9 = ymm6[2,1,3,3,6,5,7,7]
1582 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
1583 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
1584 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
1585 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm3[0,2,2,3,4,6,6,7]
1586 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
1587 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
1588 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm9 = ymm0[2,1,3,3,6,5,7,7]
1589 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,3]
1590 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0],ymm4[1],ymm9[2,3,4],ymm4[5],ymm9[6,7]
1591 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm8[2,3],ymm4[4,5],ymm8[6,7]
1592 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
1593 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
1594 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
1595 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
1596 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
1597 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
1598 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
1599 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,1,1,3,4,5,5,7]
1600 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
1601 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
1602 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
1603 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[0,0,2,1,4,4,6,5]
1604 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
1605 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm12[0,1,1,3,4,5,5,7]
1606 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
1607 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
1608 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm13[0,0,2,1,4,4,6,5]
1609 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
1610 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm5[0,1,1,3,4,5,5,7]
1611 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
1612 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3,4],ymm3[5],ymm1[6,7]
1613 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5],ymm2[6,7]
1614 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1615 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 128(%rax)
1616 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 192(%rax)
1617 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 224(%rax)
1618 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, 160(%rax)
1619 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, 64(%rax)
1620 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1621 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%rax)
1622 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1623 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
1624 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1625 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
1626 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1627 ; AVX2-FAST-PERLANE-NEXT: retq
1629 ; AVX512F-LABEL: store_i16_stride8_vf16:
1631 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
1632 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
1633 ; AVX512F-NEXT: vmovdqa (%rdi), %ymm2
1634 ; AVX512F-NEXT: vmovdqa (%rdx), %ymm9
1635 ; AVX512F-NEXT: vmovdqa (%rcx), %ymm10
1636 ; AVX512F-NEXT: vmovdqa (%r8), %ymm15
1637 ; AVX512F-NEXT: vmovdqa (%r9), %ymm3
1638 ; AVX512F-NEXT: vmovdqa (%r10), %ymm4
1639 ; AVX512F-NEXT: vmovdqa (%rax), %ymm1
1640 ; AVX512F-NEXT: vmovdqa (%rax), %xmm5
1641 ; AVX512F-NEXT: vmovdqa (%r10), %xmm6
1642 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1643 ; AVX512F-NEXT: vmovdqa64 %xmm6, %xmm21
1644 ; AVX512F-NEXT: vmovdqa64 %xmm5, %xmm22
1645 ; AVX512F-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm16
1646 ; AVX512F-NEXT: vmovdqa (%r9), %xmm5
1647 ; AVX512F-NEXT: vmovdqa (%r8), %xmm8
1648 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
1649 ; AVX512F-NEXT: vmovdqa64 %xmm5, %xmm23
1650 ; AVX512F-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm20
1651 ; AVX512F-NEXT: vmovdqa (%rcx), %xmm11
1652 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm12
1653 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1654 ; AVX512F-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm17
1655 ; AVX512F-NEXT: vmovdqa (%rsi), %xmm13
1656 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm14
1657 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
1658 ; AVX512F-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
1659 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm4[0],ymm1[0],ymm4[1],ymm1[1],ymm4[2],ymm1[2],ymm4[3],ymm1[3],ymm4[8],ymm1[8],ymm4[9],ymm1[9],ymm4[10],ymm1[10],ymm4[11],ymm1[11]
1660 ; AVX512F-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm18
1661 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm15[0],ymm3[0],ymm15[1],ymm3[1],ymm15[2],ymm3[2],ymm15[3],ymm3[3],ymm15[8],ymm3[8],ymm15[9],ymm3[9],ymm15[10],ymm3[10],ymm15[11],ymm3[11]
1662 ; AVX512F-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm5
1663 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[2],ymm10[2],ymm9[3],ymm10[3],ymm9[8],ymm10[8],ymm9[9],ymm10[9],ymm9[10],ymm10[10],ymm9[11],ymm10[11]
1664 ; AVX512F-NEXT: vinserti64x4 $1, %ymm6, %zmm6, %zmm19
1665 ; AVX512F-NEXT: vmovdqa (%rsi), %ymm6
1666 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm2[0],ymm6[0],ymm2[1],ymm6[1],ymm2[2],ymm6[2],ymm2[3],ymm6[3],ymm2[8],ymm6[8],ymm2[9],ymm6[9],ymm2[10],ymm6[10],ymm2[11],ymm6[11]
1667 ; AVX512F-NEXT: vinserti64x4 $1, %ymm7, %zmm7, %zmm7
1668 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm1[4],ymm4[5],ymm1[5],ymm4[6],ymm1[6],ymm4[7],ymm1[7],ymm4[12],ymm1[12],ymm4[13],ymm1[13],ymm4[14],ymm1[14],ymm4[15],ymm1[15]
1669 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
1670 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm15[4],ymm3[4],ymm15[5],ymm3[5],ymm15[6],ymm3[6],ymm15[7],ymm3[7],ymm15[12],ymm3[12],ymm15[13],ymm3[13],ymm15[14],ymm3[14],ymm15[15],ymm3[15]
1671 ; AVX512F-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm3
1672 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm9[4],ymm10[4],ymm9[5],ymm10[5],ymm9[6],ymm10[6],ymm9[7],ymm10[7],ymm9[12],ymm10[12],ymm9[13],ymm10[13],ymm9[14],ymm10[14],ymm9[15],ymm10[15]
1673 ; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm4
1674 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm2[4],ymm6[4],ymm2[5],ymm6[5],ymm2[6],ymm6[6],ymm2[7],ymm6[7],ymm2[12],ymm6[12],ymm2[13],ymm6[13],ymm2[14],ymm6[14],ymm2[15],ymm6[15]
1675 ; AVX512F-NEXT: vinserti64x4 $1, %ymm6, %zmm6, %zmm6
1676 ; AVX512F-NEXT: vmovdqa64 %xmm21, %xmm2
1677 ; AVX512F-NEXT: vmovdqa64 %xmm22, %xmm9
1678 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
1679 ; AVX512F-NEXT: vinserti32x4 $2, %xmm9, %zmm9, %zmm9
1680 ; AVX512F-NEXT: vmovdqa64 %xmm23, %xmm2
1681 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm2[0],xmm8[1],xmm2[1],xmm8[2],xmm2[2],xmm8[3],xmm2[3]
1682 ; AVX512F-NEXT: vinserti32x4 $2, %xmm8, %zmm8, %zmm8
1683 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1684 ; AVX512F-NEXT: vinserti32x4 $2, %xmm10, %zmm10, %zmm10
1685 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1686 ; AVX512F-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm11
1687 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
1688 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm12 = <0,u,0,16,u,u,1,17,10,10,10,26,u,u,11,27>
1689 ; AVX512F-NEXT: vpermt2d %zmm16, %zmm12, %zmm20
1690 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm13 = <0,16,1,u,1,17,u,u,10,26,11,11,11,27,u,u>
1691 ; AVX512F-NEXT: vpermt2d %zmm17, %zmm13, %zmm0
1692 ; AVX512F-NEXT: movb $-86, %cl
1693 ; AVX512F-NEXT: kmovw %ecx, %k1
1694 ; AVX512F-NEXT: vmovdqa64 %zmm20, %zmm0 {%k1}
1695 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,4,20,4,5,5,21,10,9,14,30,14,13,15,31]
1696 ; AVX512F-NEXT: vpermt2d %zmm18, %zmm2, %zmm5
1697 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm14 = [4,20,1,3,5,21,5,7,14,30,11,11,15,31,15,15]
1698 ; AVX512F-NEXT: vpermt2d %zmm19, %zmm14, %zmm7
1699 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
1700 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm2, %zmm3
1701 ; AVX512F-NEXT: vpermt2d %zmm4, %zmm14, %zmm6
1702 ; AVX512F-NEXT: vmovdqa64 %zmm3, %zmm6 {%k1}
1703 ; AVX512F-NEXT: vpermt2d %zmm9, %zmm12, %zmm8
1704 ; AVX512F-NEXT: vpermt2d %zmm10, %zmm13, %zmm11
1705 ; AVX512F-NEXT: vmovdqa64 %zmm8, %zmm11 {%k1}
1706 ; AVX512F-NEXT: vmovdqa64 %zmm11, (%rax)
1707 ; AVX512F-NEXT: vmovdqa64 %zmm6, 192(%rax)
1708 ; AVX512F-NEXT: vmovdqa64 %zmm7, 128(%rax)
1709 ; AVX512F-NEXT: vmovdqa64 %zmm0, 64(%rax)
1710 ; AVX512F-NEXT: vzeroupper
1711 ; AVX512F-NEXT: retq
1713 ; AVX512BW-LABEL: store_i16_stride8_vf16:
1714 ; AVX512BW: # %bb.0:
1715 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1716 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1717 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
1718 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
1719 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
1720 ; AVX512BW-NEXT: vmovdqa (%r8), %ymm2
1721 ; AVX512BW-NEXT: vmovdqa (%r11), %ymm3
1722 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
1723 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
1724 ; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
1725 ; AVX512BW-NEXT: vinserti64x4 $1, (%r10), %zmm3, %zmm3
1726 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,0,16,32,48,u,u,u,u,1,17,33,49,u,u,u,u,2,18,34,50,u,u,u,u,3,19,35,51>
1727 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1728 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,16,32,48,u,u,u,u,1,17,33,49,u,u,u,u,2,18,34,50,u,u,u,u,3,19,35,51,u,u,u,u>
1729 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1730 ; AVX512BW-NEXT: movb $-86, %cl
1731 ; AVX512BW-NEXT: kmovd %ecx, %k1
1732 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm5 {%k1}
1733 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,4,20,36,52,u,u,u,u,5,21,37,53,u,u,u,u,6,22,38,54,u,u,u,u,7,23,39,55>
1734 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1735 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <4,20,36,52,u,u,u,u,5,21,37,53,u,u,u,u,6,22,38,54,u,u,u,u,7,23,39,55,u,u,u,u>
1736 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1737 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm6 {%k1}
1738 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,8,24,40,56,u,u,u,u,9,25,41,57,u,u,u,u,10,26,42,58,u,u,u,u,11,27,43,59>
1739 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1740 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <8,24,40,56,u,u,u,u,9,25,41,57,u,u,u,u,10,26,42,58,u,u,u,u,11,27,43,59,u,u,u,u>
1741 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm7
1742 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm7 {%k1}
1743 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,12,28,44,60,u,u,u,u,13,29,45,61,u,u,u,u,14,30,46,62,u,u,u,u,15,31,47,63>
1744 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1745 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <12,28,44,60,u,u,u,u,13,29,45,61,u,u,u,u,14,30,46,62,u,u,u,u,15,31,47,63,u,u,u,u>
1746 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1747 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm2 {%k1}
1748 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rax)
1749 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%rax)
1750 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%rax)
1751 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%rax)
1752 ; AVX512BW-NEXT: vzeroupper
1753 ; AVX512BW-NEXT: retq
1754 %in.vec0 = load <16 x i16>, ptr %in.vecptr0, align 64
1755 %in.vec1 = load <16 x i16>, ptr %in.vecptr1, align 64
1756 %in.vec2 = load <16 x i16>, ptr %in.vecptr2, align 64
1757 %in.vec3 = load <16 x i16>, ptr %in.vecptr3, align 64
1758 %in.vec4 = load <16 x i16>, ptr %in.vecptr4, align 64
1759 %in.vec5 = load <16 x i16>, ptr %in.vecptr5, align 64
1760 %in.vec6 = load <16 x i16>, ptr %in.vecptr6, align 64
1761 %in.vec7 = load <16 x i16>, ptr %in.vecptr7, align 64
1762 %1 = shufflevector <16 x i16> %in.vec0, <16 x i16> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1763 %2 = shufflevector <16 x i16> %in.vec2, <16 x i16> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1764 %3 = shufflevector <16 x i16> %in.vec4, <16 x i16> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1765 %4 = shufflevector <16 x i16> %in.vec6, <16 x i16> %in.vec7, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1766 %5 = shufflevector <32 x i16> %1, <32 x i16> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1767 %6 = shufflevector <32 x i16> %3, <32 x i16> %4, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1768 %7 = shufflevector <64 x i16> %5, <64 x i16> %6, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
1769 %interleaved.vec = shufflevector <128 x i16> %7, <128 x i16> poison, <128 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 112, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 113, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 114, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 115, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 116, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 117, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 118, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 119, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 120, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 121, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 122, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 123, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 124, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 125, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 126, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111, i32 127>
1770 store <128 x i16> %interleaved.vec, ptr %out.vec, align 64
1774 define void @store_i16_stride8_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
1775 ; SSE-LABEL: store_i16_stride8_vf32:
1777 ; SSE-NEXT: subq $264, %rsp # imm = 0x108
1778 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1779 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1780 ; SSE-NEXT: movdqa (%rdi), %xmm3
1781 ; SSE-NEXT: movdqa (%rsi), %xmm8
1782 ; SSE-NEXT: movdqa (%rdx), %xmm4
1783 ; SSE-NEXT: movdqa (%rcx), %xmm10
1784 ; SSE-NEXT: movdqa (%r8), %xmm6
1785 ; SSE-NEXT: movdqa (%r9), %xmm9
1786 ; SSE-NEXT: movdqa (%r10), %xmm7
1787 ; SSE-NEXT: movdqa (%rax), %xmm11
1788 ; SSE-NEXT: movdqa %xmm4, %xmm1
1789 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm10[0],xmm1[1],xmm10[1],xmm1[2],xmm10[2],xmm1[3],xmm10[3]
1790 ; SSE-NEXT: movdqa %xmm3, %xmm0
1791 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
1792 ; SSE-NEXT: movdqa %xmm0, %xmm2
1793 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1794 ; SSE-NEXT: movdqa %xmm7, %xmm12
1795 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1796 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm12[0,0,0,0]
1797 ; SSE-NEXT: movdqa %xmm6, %xmm5
1798 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm9[0],xmm5[1],xmm9[1],xmm5[2],xmm9[2],xmm5[3],xmm9[3]
1799 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm5[0,0,0,0]
1800 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1801 ; SSE-NEXT: movsd {{.*#+}} xmm14 = xmm2[0],xmm14[1]
1802 ; SSE-NEXT: movapd %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1803 ; SSE-NEXT: movdqa %xmm5, %xmm2
1804 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm12[0],xmm2[1],xmm12[1]
1805 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm1[1,1,1,1]
1806 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[1,1,1,1]
1807 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
1808 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm2[2,3]
1809 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1810 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm12[2,2,2,2]
1811 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm5[2,2,2,2]
1812 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm2[2],xmm13[3],xmm2[3]
1813 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[3,3,3,3]
1814 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1815 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm0[0],xmm13[1]
1816 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1817 ; SSE-NEXT: movdqa 16(%r8), %xmm0
1818 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm12[2],xmm5[3],xmm12[3]
1819 ; SSE-NEXT: movdqa 16(%r9), %xmm2
1820 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
1821 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm1[0],xmm14[1],xmm1[1]
1822 ; SSE-NEXT: movdqa 16(%r10), %xmm1
1823 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm5[2,3]
1824 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1825 ; SSE-NEXT: movdqa 16(%rax), %xmm5
1826 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
1827 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
1828 ; SSE-NEXT: movdqa %xmm3, %xmm8
1829 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm4[0],xmm8[1],xmm4[1]
1830 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
1831 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm7[0,0,0,0]
1832 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm9[4],xmm6[5],xmm9[5],xmm6[6],xmm9[6],xmm6[7],xmm9[7]
1833 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,0,0,0]
1834 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1835 ; SSE-NEXT: movsd {{.*#+}} xmm9 = xmm8[0],xmm9[1]
1836 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1837 ; SSE-NEXT: movdqa %xmm6, %xmm8
1838 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
1839 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm4[1,1,1,1]
1840 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm3[1,1,1,1]
1841 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
1842 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm8[2,3]
1843 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1844 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm7[2,2,2,2]
1845 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[2,2,2,2]
1846 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm8[2],xmm9[3],xmm8[3]
1847 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm3[3,3,3,3]
1848 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1849 ; SSE-NEXT: movsd {{.*#+}} xmm9 = xmm3[0],xmm9[1]
1850 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1851 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1852 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[3,3,3,3]
1853 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1]
1854 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm6[2,3]
1855 ; SSE-NEXT: movaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1856 ; SSE-NEXT: movdqa %xmm1, %xmm7
1857 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1858 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
1859 ; SSE-NEXT: movdqa %xmm0, %xmm6
1860 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1861 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm6[0,0,0,0]
1862 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1863 ; SSE-NEXT: movdqa %xmm4, %xmm13
1864 ; SSE-NEXT: movdqa 16(%rdx), %xmm3
1865 ; SSE-NEXT: movdqa 16(%rcx), %xmm8
1866 ; SSE-NEXT: movdqa %xmm3, %xmm10
1867 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
1868 ; SSE-NEXT: movdqa 16(%rdi), %xmm4
1869 ; SSE-NEXT: movdqa 16(%rsi), %xmm9
1870 ; SSE-NEXT: movdqa %xmm4, %xmm11
1871 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
1872 ; SSE-NEXT: movdqa %xmm11, %xmm12
1873 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
1874 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
1875 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1876 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
1877 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
1878 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
1879 ; SSE-NEXT: movdqa %xmm6, %xmm12
1880 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1]
1881 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
1882 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1883 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm7[2,2,2,2]
1884 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm6[2,2,2,2]
1885 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
1886 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
1887 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1888 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
1889 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1890 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1891 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[3,3,3,3]
1892 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1]
1893 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm6[2,3]
1894 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1895 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
1896 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm9[4],xmm4[5],xmm9[5],xmm4[6],xmm9[6],xmm4[7],xmm9[7]
1897 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
1898 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
1899 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,0,0]
1900 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,0,0,0]
1901 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm2[2],xmm5[3],xmm2[3]
1902 ; SSE-NEXT: movdqa %xmm4, %xmm2
1903 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
1904 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm2[0],xmm5[1]
1905 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1906 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,1,1]
1907 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[1,1,1,1]
1908 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm2[0],xmm5[1],xmm2[1]
1909 ; SSE-NEXT: movdqa %xmm0, %xmm2
1910 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1911 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm2[2,3]
1912 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1913 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,2,2,2]
1914 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[2,2,2,2]
1915 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm2[2],xmm5[3],xmm2[3]
1916 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[3,3,3,3]
1917 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1918 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
1919 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1920 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1921 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[3,3,3,3]
1922 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1923 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm0[2,3]
1924 ; SSE-NEXT: movaps %xmm2, (%rsp) # 16-byte Spill
1925 ; SSE-NEXT: movdqa 32(%r10), %xmm0
1926 ; SSE-NEXT: movdqa 32(%rax), %xmm4
1927 ; SSE-NEXT: movdqa %xmm0, %xmm6
1928 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
1929 ; SSE-NEXT: movdqa 32(%r8), %xmm1
1930 ; SSE-NEXT: movdqa 32(%r9), %xmm5
1931 ; SSE-NEXT: movdqa %xmm1, %xmm7
1932 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1933 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
1934 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
1935 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
1936 ; SSE-NEXT: movdqa %xmm3, %xmm13
1937 ; SSE-NEXT: movdqa 32(%rdx), %xmm2
1938 ; SSE-NEXT: movdqa 32(%rcx), %xmm8
1939 ; SSE-NEXT: movdqa %xmm2, %xmm10
1940 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
1941 ; SSE-NEXT: movdqa 32(%rdi), %xmm3
1942 ; SSE-NEXT: movdqa 32(%rsi), %xmm9
1943 ; SSE-NEXT: movdqa %xmm3, %xmm11
1944 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
1945 ; SSE-NEXT: movdqa %xmm11, %xmm12
1946 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
1947 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
1948 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1949 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
1950 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
1951 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
1952 ; SSE-NEXT: movdqa %xmm7, %xmm12
1953 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
1954 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
1955 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1956 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
1957 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
1958 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
1959 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
1960 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1961 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
1962 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1963 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1964 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
1965 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
1966 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
1967 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1968 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
1969 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
1970 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
1971 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
1972 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
1973 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
1974 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1975 ; SSE-NEXT: movdqa %xmm3, %xmm4
1976 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
1977 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
1978 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1979 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
1980 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
1981 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
1982 ; SSE-NEXT: movdqa %xmm1, %xmm4
1983 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
1984 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
1985 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1986 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
1987 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm1[2,2,2,2]
1988 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm4[2],xmm15[3],xmm4[3]
1989 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm3[3,3,3,3]
1990 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
1991 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm3[0],xmm15[1]
1992 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1993 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
1994 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm0[0],xmm14[1],xmm0[1]
1995 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm1[2,3]
1996 ; SSE-NEXT: movdqa 48(%r10), %xmm9
1997 ; SSE-NEXT: movdqa 48(%rax), %xmm0
1998 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1999 ; SSE-NEXT: movdqa %xmm9, %xmm5
2000 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
2001 ; SSE-NEXT: movdqa 48(%r8), %xmm4
2002 ; SSE-NEXT: movdqa 48(%r9), %xmm0
2003 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2004 ; SSE-NEXT: movdqa %xmm4, %xmm1
2005 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2006 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,0,0,0]
2007 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm1[0,0,0,0]
2008 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm0[2],xmm11[3],xmm0[3]
2009 ; SSE-NEXT: movdqa 48(%rdx), %xmm6
2010 ; SSE-NEXT: movdqa 48(%rcx), %xmm13
2011 ; SSE-NEXT: movdqa %xmm6, %xmm3
2012 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
2013 ; SSE-NEXT: movdqa 48(%rdi), %xmm2
2014 ; SSE-NEXT: movdqa 48(%rsi), %xmm12
2015 ; SSE-NEXT: movdqa %xmm2, %xmm0
2016 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
2017 ; SSE-NEXT: movdqa %xmm0, %xmm7
2018 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
2019 ; SSE-NEXT: movsd {{.*#+}} xmm11 = xmm7[0],xmm11[1]
2020 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm3[1,1,1,1]
2021 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[1,1,1,1]
2022 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm7[0],xmm10[1],xmm7[1]
2023 ; SSE-NEXT: movdqa %xmm1, %xmm7
2024 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
2025 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm7[2,3]
2026 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[2,2,2,2]
2027 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm1[2,2,2,2]
2028 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
2029 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[3,3,3,3]
2030 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm3[2],xmm0[3],xmm3[3]
2031 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm0[0],xmm8[1]
2032 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
2033 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[3,3,3,3]
2034 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm0[0],xmm7[1],xmm0[1]
2035 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[2,3]
2036 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm13[4],xmm6[5],xmm13[5],xmm6[6],xmm13[6],xmm6[7],xmm13[7]
2037 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm12[4],xmm2[5],xmm12[5],xmm2[6],xmm12[6],xmm2[7],xmm12[7]
2038 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
2039 ; SSE-NEXT: # xmm9 = xmm9[4],mem[4],xmm9[5],mem[5],xmm9[6],mem[6],xmm9[7],mem[7]
2040 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
2041 ; SSE-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
2042 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[0,0,0,0]
2043 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,0,0]
2044 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2045 ; SSE-NEXT: movdqa %xmm2, %xmm0
2046 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
2047 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
2048 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,1,1]
2049 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,1,1]
2050 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
2051 ; SSE-NEXT: movdqa %xmm4, %xmm0
2052 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm9[0],xmm0[1],xmm9[1]
2053 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm0[2,3]
2054 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,2,2]
2055 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[2,2,2,2]
2056 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm0[2],xmm5[3],xmm0[3]
2057 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
2058 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm6[2],xmm2[3],xmm6[3]
2059 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm2[0],xmm5[1]
2060 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm9[2],xmm4[3],xmm9[3]
2061 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[3,3,3,3]
2062 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
2063 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
2064 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2065 ; SSE-NEXT: movaps %xmm0, 496(%rax)
2066 ; SSE-NEXT: movapd %xmm5, 480(%rax)
2067 ; SSE-NEXT: movaps %xmm3, 464(%rax)
2068 ; SSE-NEXT: movapd %xmm1, 448(%rax)
2069 ; SSE-NEXT: movaps %xmm7, 432(%rax)
2070 ; SSE-NEXT: movapd %xmm8, 416(%rax)
2071 ; SSE-NEXT: movaps %xmm10, 400(%rax)
2072 ; SSE-NEXT: movapd %xmm11, 384(%rax)
2073 ; SSE-NEXT: movaps %xmm14, 368(%rax)
2074 ; SSE-NEXT: movapd %xmm15, 352(%rax)
2075 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2076 ; SSE-NEXT: movaps %xmm0, 336(%rax)
2077 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2078 ; SSE-NEXT: movaps %xmm0, 320(%rax)
2079 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2080 ; SSE-NEXT: movaps %xmm0, 304(%rax)
2081 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2082 ; SSE-NEXT: movaps %xmm0, 288(%rax)
2083 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2084 ; SSE-NEXT: movaps %xmm0, 272(%rax)
2085 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2086 ; SSE-NEXT: movaps %xmm0, 256(%rax)
2087 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2088 ; SSE-NEXT: movaps %xmm0, 240(%rax)
2089 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2090 ; SSE-NEXT: movaps %xmm0, 224(%rax)
2091 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2092 ; SSE-NEXT: movaps %xmm0, 208(%rax)
2093 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2094 ; SSE-NEXT: movaps %xmm0, 192(%rax)
2095 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2096 ; SSE-NEXT: movaps %xmm0, 176(%rax)
2097 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2098 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2099 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2100 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2101 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2102 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2103 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2104 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2105 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2106 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2107 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2108 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2109 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2110 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2111 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2112 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2113 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2114 ; SSE-NEXT: movaps %xmm0, 32(%rax)
2115 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2116 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2117 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2118 ; SSE-NEXT: movaps %xmm0, (%rax)
2119 ; SSE-NEXT: addq $264, %rsp # imm = 0x108
2122 ; AVX1-ONLY-LABEL: store_i16_stride8_vf32:
2123 ; AVX1-ONLY: # %bb.0:
2124 ; AVX1-ONLY-NEXT: subq $296, %rsp # imm = 0x128
2125 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2126 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
2127 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
2128 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2129 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
2130 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2131 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2132 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm2[2,2,3,3]
2133 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
2134 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm1
2135 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2136 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm3
2137 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2138 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2139 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,2,2,2]
2140 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
2141 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6],ymm1[7]
2142 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm0
2143 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2144 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
2145 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2146 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2147 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[2,2,3,3]
2148 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,3,2,3]
2149 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2150 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm0
2151 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2152 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
2153 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2154 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
2155 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm6[2,3,2,3]
2156 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[3,3,3,3]
2157 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm7
2158 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm0
2159 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0],ymm1[1],ymm7[2,3,4],ymm1[5],ymm7[6,7]
2160 ; AVX1-ONLY-NEXT: vmovdqa 48(%r10), %xmm1
2161 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0,1],ymm5[2,3],ymm7[4,5],ymm5[6,7]
2162 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2163 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[0,0,0,0]
2164 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2165 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
2166 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[0,1,0,1]
2167 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2168 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm5
2169 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm2
2170 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7]
2171 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[1,1,1,1]
2172 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
2173 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
2174 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
2175 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
2176 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2,3],ymm4[4,5],ymm3[6,7]
2177 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2178 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2179 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,0,0,0]
2180 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm8[0,1,0,1]
2181 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm4
2182 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm3
2183 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
2184 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
2185 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
2186 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
2187 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
2188 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm4
2189 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm5
2190 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
2191 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
2192 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm14
2193 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm6
2194 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm7
2195 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2196 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm15[0,0,1,1]
2197 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm13, %ymm13
2198 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0],ymm13[1],ymm14[2,3,4],ymm13[5],ymm14[6,7]
2199 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1],ymm10[2,3],ymm13[4,5],ymm10[6,7]
2200 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2201 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
2202 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
2203 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
2204 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
2205 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
2206 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm15[2,2,3,3]
2207 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm15[2,3,2,3]
2208 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
2209 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
2210 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
2211 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
2212 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
2213 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
2214 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2215 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2216 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,0,0]
2217 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm1[0,1,0,1]
2218 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm2, %ymm2
2219 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2220 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,1,0,1]
2221 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm0[0],zero,xmm0[1],zero
2222 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
2223 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
2224 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2225 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
2226 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
2227 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
2228 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
2229 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
2230 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
2231 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
2232 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2233 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,3,3]
2234 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2235 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[2,2,2,2]
2236 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
2237 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6],ymm1[7]
2238 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[2,2,3,3]
2239 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
2240 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2241 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
2242 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
2243 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2244 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0],ymm0[1],ymm2[2,3,4],ymm0[5],ymm2[6,7]
2245 ; AVX1-ONLY-NEXT: vmovdqa 32(%r10), %xmm0
2246 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
2247 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2248 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm1
2249 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2250 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,0,0]
2251 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,0,1]
2252 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
2253 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm2
2254 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm3
2255 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2256 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
2257 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
2258 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
2259 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
2260 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm4
2261 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm5
2262 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
2263 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
2264 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm13
2265 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm6
2266 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm7
2267 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2268 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm15[0,0,1,1]
2269 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
2270 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0],ymm14[1],ymm13[2,3,4],ymm14[5],ymm13[6,7]
2271 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm13[0,1],ymm10[2,3],ymm13[4,5],ymm10[6,7]
2272 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2273 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
2274 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
2275 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
2276 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
2277 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
2278 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm15[2,2,3,3]
2279 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm15[2,3,2,3]
2280 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
2281 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
2282 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
2283 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
2284 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
2285 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
2286 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2287 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2288 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
2289 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,0,1]
2290 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm1
2291 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2292 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
2293 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm2[0],zero,xmm2[1],zero
2294 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
2295 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
2296 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2297 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
2298 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
2299 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
2300 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
2301 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
2302 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
2303 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
2304 ; AVX1-ONLY-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
2305 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
2306 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
2307 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
2308 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
2309 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
2310 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
2311 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
2312 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
2313 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
2314 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
2315 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2316 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
2317 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2318 ; AVX1-ONLY-NEXT: vmovdqa 16(%r10), %xmm12
2319 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm11
2320 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
2321 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[0,0,0,0]
2322 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm8[0,1,0,1]
2323 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2324 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm10
2325 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm7
2326 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm7[4],xmm10[4],xmm7[5],xmm10[5],xmm7[6],xmm10[6],xmm7[7],xmm10[7]
2327 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm13[0,1,0,1]
2328 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm13[0],zero,xmm13[1],zero
2329 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
2330 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
2331 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm6
2332 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm5
2333 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
2334 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
2335 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
2336 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm4
2337 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm3
2338 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
2339 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm0[0,0,1,1]
2340 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm15, %ymm15
2341 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm15[1],ymm1[2,3,4],ymm15[5],ymm1[6,7]
2342 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm1[0,1],ymm9[2,3],ymm1[4,5],ymm9[6,7]
2343 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm13[2,2,3,3]
2344 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm13, %ymm1
2345 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm8[2,2,2,2]
2346 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm13, %ymm8
2347 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2],ymm8[3],ymm1[4,5,6],ymm8[7]
2348 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[2,2,3,3]
2349 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
2350 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm8, %ymm0
2351 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm2[2,3,2,3]
2352 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
2353 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm8, %ymm2
2354 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3,4],ymm0[5],ymm2[6,7]
2355 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
2356 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
2357 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,0,0,0]
2358 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,1,0,1]
2359 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2360 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
2361 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[0,1,0,1]
2362 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm2[0],zero,xmm2[1],zero
2363 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm7, %ymm7
2364 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0,1,2],ymm0[3],ymm7[4,5,6],ymm0[7]
2365 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
2366 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
2367 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[1,1,1,1]
2368 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
2369 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
2370 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm6
2371 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3,4],ymm6[5],ymm4[6,7]
2372 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1],ymm0[2,3],ymm4[4,5],ymm0[6,7]
2373 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,2,3,3]
2374 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
2375 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm1[2,2,2,2]
2376 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
2377 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
2378 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
2379 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,2,3]
2380 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2381 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,3,2,3]
2382 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[3,3,3,3]
2383 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
2384 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7]
2385 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
2386 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2387 ; AVX1-ONLY-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
2388 ; AVX1-ONLY-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2389 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,0,0]
2390 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[0,1,0,1]
2391 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
2392 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2393 ; AVX1-ONLY-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
2394 ; AVX1-ONLY-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
2395 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,1,0,1]
2396 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
2397 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
2398 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7]
2399 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2400 ; AVX1-ONLY-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
2401 ; AVX1-ONLY-NEXT: # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
2402 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2403 ; AVX1-ONLY-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm6 # 16-byte Folded Reload
2404 ; AVX1-ONLY-NEXT: # xmm6 = xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
2405 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm5[1,1,1,1]
2406 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm5, %ymm7
2407 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm6[0,0,1,1]
2408 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm10, %ymm10
2409 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0],ymm10[1],ymm7[2,3,4],ymm10[5],ymm7[6,7]
2410 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm7[0,1],ymm3[2,3],ymm7[4,5],ymm3[6,7]
2411 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[2,2,3,3]
2412 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm4, %ymm4
2413 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[2,2,2,2]
2414 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm7, %ymm2
2415 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
2416 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm6[2,2,3,3]
2417 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,3,2,3]
2418 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
2419 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[2,3,2,3]
2420 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[3,3,3,3]
2421 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
2422 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
2423 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
2424 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2425 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 96(%rax)
2426 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 64(%rax)
2427 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 160(%rax)
2428 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
2429 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 224(%rax)
2430 ; AVX1-ONLY-NEXT: vmovaps %ymm9, 192(%rax)
2431 ; AVX1-ONLY-NEXT: vmovaps %ymm14, 288(%rax)
2432 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2433 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
2434 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2435 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
2436 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2437 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
2438 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2439 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
2440 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2441 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
2442 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2443 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
2444 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2445 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
2446 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2447 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
2448 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2449 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
2450 ; AVX1-ONLY-NEXT: addq $296, %rsp # imm = 0x128
2451 ; AVX1-ONLY-NEXT: vzeroupper
2452 ; AVX1-ONLY-NEXT: retq
2454 ; AVX2-SLOW-LABEL: store_i16_stride8_vf32:
2455 ; AVX2-SLOW: # %bb.0:
2456 ; AVX2-SLOW-NEXT: subq $264, %rsp # imm = 0x108
2457 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2458 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
2459 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm1
2460 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
2461 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %xmm11
2462 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm0
2463 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2464 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
2465 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,2,3,3]
2466 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2467 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm3
2468 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2469 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm2
2470 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2471 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
2472 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[2,2,3,3]
2473 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2474 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
2475 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm1
2476 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2477 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm3
2478 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2479 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2480 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm15[2,2,3,3]
2481 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
2482 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm1
2483 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2484 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm13
2485 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
2486 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm1[2,2,3,3]
2487 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
2488 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
2489 ; AVX2-SLOW-NEXT: vmovdqa 32(%r10), %xmm9
2490 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm14[0,1],ymm5[2,3],ymm14[4,5],ymm5[6,7]
2491 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2492 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm10
2493 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
2494 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2495 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2496 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2497 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
2498 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm14
2499 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[0,0,1,1]
2500 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
2501 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
2502 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
2503 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
2504 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2505 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2506 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm11[4],xmm9[5],xmm11[5],xmm9[6],xmm11[6],xmm9[7],xmm11[7]
2507 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
2508 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2509 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm14[4],xmm10[4],xmm14[5],xmm10[5],xmm14[6],xmm10[6],xmm14[7],xmm10[7]
2510 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
2511 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2512 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
2513 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm15
2514 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm8
2515 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm8[4],xmm15[4],xmm8[5],xmm15[5],xmm8[6],xmm15[6],xmm8[7],xmm15[7]
2516 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[0,0,1,1]
2517 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
2518 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm7
2519 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm6
2520 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
2521 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm0[0],zero,xmm0[1],zero
2522 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,1,3]
2523 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0],ymm3[1],ymm12[2,3,4],ymm3[5],ymm12[6,7]
2524 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
2525 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2526 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[2,2,3,3]
2527 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2528 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2529 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2530 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7]
2531 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2532 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
2533 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2534 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
2535 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
2536 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
2537 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2538 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm11[0],xmm9[1],xmm11[1],xmm9[2],xmm11[2],xmm9[3],xmm11[3]
2539 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
2540 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
2541 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2542 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
2543 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2544 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
2545 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3]
2546 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2547 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
2548 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
2549 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
2550 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
2551 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3,4],ymm4[5],ymm6[6,7]
2552 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
2553 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2554 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2555 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2556 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2557 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2558 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
2559 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,2,3,3]
2560 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
2561 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[2,2,3,3]
2562 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
2563 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
2564 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm14
2565 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2566 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2567 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm15
2568 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2569 ; AVX2-SLOW-NEXT: vpunpckhwd (%rsp), %xmm0, %xmm1 # 16-byte Folded Reload
2570 ; AVX2-SLOW-NEXT: # xmm1 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
2571 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,0,1,1]
2572 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2573 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2574 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
2575 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2576 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
2577 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2578 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7]
2579 ; AVX2-SLOW-NEXT: vmovdqa 32(%r10), %ymm5
2580 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2581 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
2582 ; AVX2-SLOW-NEXT: # xmm7 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
2583 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm7[0,0,1,1]
2584 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
2585 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm8 # 16-byte Folded Reload
2586 ; AVX2-SLOW-NEXT: # xmm8 = xmm13[4],mem[4],xmm13[5],mem[5],xmm13[6],mem[6],xmm13[7],mem[7]
2587 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm8[0],zero,xmm8[1],zero
2588 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
2589 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2,3,4],ymm6[5],ymm9[6,7]
2590 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %ymm10
2591 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1],ymm3[2,3],ymm6[4,5],ymm3[6,7]
2592 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2593 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2594 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2595 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2596 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2597 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
2598 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[2,2,3,3]
2599 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
2600 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[2,2,3,3]
2601 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
2602 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7]
2603 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
2604 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2605 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm5[4],ymm10[4],ymm5[5],ymm10[5],ymm5[6],ymm10[6],ymm5[7],ymm10[7],ymm5[12],ymm10[12],ymm5[13],ymm10[13],ymm5[14],ymm10[14],ymm5[15],ymm10[15]
2606 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[0,0,2,1,4,4,6,5]
2607 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
2608 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm14[4],ymm15[4],ymm14[5],ymm15[5],ymm14[6],ymm15[6],ymm14[7],ymm15[7],ymm14[12],ymm15[12],ymm14[13],ymm15[13],ymm14[14],ymm15[14],ymm14[15],ymm15[15]
2609 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm3[0,1,1,3,4,5,5,7]
2610 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
2611 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2],ymm2[3],ymm8[4,5,6],ymm2[7]
2612 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm11
2613 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm12
2614 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm9 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
2615 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm9[0,0,2,1,4,4,6,5]
2616 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
2617 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm13
2618 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm2
2619 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm13[4],ymm2[4],ymm13[5],ymm2[5],ymm13[6],ymm2[6],ymm13[7],ymm2[7],ymm13[12],ymm2[12],ymm13[13],ymm2[13],ymm13[14],ymm2[14],ymm13[15],ymm2[15]
2620 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm0[0,1,1,3,4,5,5,7]
2621 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
2622 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0],ymm8[1],ymm6[2,3,4],ymm8[5],ymm6[6,7]
2623 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
2624 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2625 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2626 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
2627 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
2628 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
2629 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
2630 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[0,2,2,3,4,6,6,7]
2631 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
2632 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
2633 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
2634 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
2635 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
2636 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2637 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm5[0],ymm10[0],ymm5[1],ymm10[1],ymm5[2],ymm10[2],ymm5[3],ymm10[3],ymm5[8],ymm10[8],ymm5[9],ymm10[9],ymm5[10],ymm10[10],ymm5[11],ymm10[11]
2638 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm14[0],ymm15[0],ymm14[1],ymm15[1],ymm14[2],ymm15[2],ymm14[3],ymm15[3],ymm14[8],ymm15[8],ymm14[9],ymm15[9],ymm14[10],ymm15[10],ymm14[11],ymm15[11]
2639 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm0[0,0,2,1,4,4,6,5]
2640 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
2641 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm1[0,1,1,3,4,5,5,7]
2642 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
2643 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
2644 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11]
2645 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm13[0],ymm2[0],ymm13[1],ymm2[1],ymm13[2],ymm2[2],ymm13[3],ymm2[3],ymm13[8],ymm2[8],ymm13[9],ymm2[9],ymm13[10],ymm2[10],ymm13[11],ymm2[11]
2646 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,0,2,1,4,4,6,5]
2647 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
2648 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm2[0,1,1,3,4,5,5,7]
2649 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
2650 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
2651 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3],ymm5[4,5],ymm3[6,7]
2652 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2653 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %ymm12
2654 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
2655 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
2656 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
2657 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
2658 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
2659 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm13
2660 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[0,2,2,3,4,6,6,7]
2661 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
2662 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
2663 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
2664 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
2665 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2666 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15]
2667 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm1[0,0,2,1,4,4,6,5]
2668 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
2669 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm8
2670 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm7
2671 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
2672 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,1,1,3,4,5,5,7]
2673 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
2674 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2],ymm0[3],ymm5[4,5,6],ymm0[7]
2675 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm9
2676 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm6
2677 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm14 = ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[12],ymm6[12],ymm9[13],ymm6[13],ymm9[14],ymm6[14],ymm9[15],ymm6[15]
2678 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm14[0,0,2,1,4,4,6,5]
2679 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
2680 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm3
2681 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm2
2682 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
2683 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm0[0,1,1,3,4,5,5,7]
2684 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
2685 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm15[1],ymm10[2,3,4],ymm15[5],ymm10[6,7]
2686 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm15 = ymm10[0,1],ymm5[2,3],ymm10[4,5],ymm5[6,7]
2687 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2688 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
2689 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
2690 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
2691 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2],ymm1[3],ymm4[4,5,6],ymm1[7]
2692 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[0,2,2,3,4,6,6,7]
2693 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
2694 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
2695 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
2696 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
2697 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
2698 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11]
2699 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
2700 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[0,0,2,1,4,4,6,5]
2701 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
2702 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm4[0,1,1,3,4,5,5,7]
2703 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
2704 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3],ymm7[4,5,6],ymm5[7]
2705 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[8],ymm6[8],ymm9[9],ymm6[9],ymm9[10],ymm6[10],ymm9[11],ymm6[11]
2706 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
2707 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
2708 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
2709 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm2[0,1,1,3,4,5,5,7]
2710 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
2711 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0],ymm3[1],ymm7[2,3,4],ymm3[5],ymm7[6,7]
2712 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
2713 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
2714 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
2715 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
2716 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
2717 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
2718 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[0,2,2,3,4,6,6,7]
2719 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
2720 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
2721 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
2722 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3,4],ymm4[5],ymm2[6,7]
2723 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
2724 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2725 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 160(%rax)
2726 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 128(%rax)
2727 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 224(%rax)
2728 ; AVX2-SLOW-NEXT: vmovdqa %ymm15, 192(%rax)
2729 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, 416(%rax)
2730 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2731 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
2732 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2733 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 480(%rax)
2734 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2735 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 448(%rax)
2736 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2737 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
2738 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2739 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
2740 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2741 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
2742 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2743 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
2744 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2745 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
2746 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2747 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
2748 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2749 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
2750 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2751 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
2752 ; AVX2-SLOW-NEXT: addq $264, %rsp # imm = 0x108
2753 ; AVX2-SLOW-NEXT: vzeroupper
2754 ; AVX2-SLOW-NEXT: retq
2756 ; AVX2-FAST-LABEL: store_i16_stride8_vf32:
2757 ; AVX2-FAST: # %bb.0:
2758 ; AVX2-FAST-NEXT: subq $296, %rsp # imm = 0x128
2759 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2760 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
2761 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm0
2762 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2763 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm1
2764 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2765 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2766 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <2,2,2,2,u,u,3,3>
2767 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm4, %ymm0
2768 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm3
2769 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2770 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
2771 ; AVX2-FAST-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
2772 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
2773 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm4, %ymm3
2774 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7]
2775 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm4
2776 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2777 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm3
2778 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2779 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
2780 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <2,2,3,3,3,3,u,u>
2781 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm7, %ymm4
2782 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm6
2783 ; AVX2-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2784 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm5
2785 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2786 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
2787 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm7, %ymm6
2788 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3,4],ymm4[5],ymm6[6,7]
2789 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1],ymm0[2,3],ymm4[4,5],ymm0[6,7]
2790 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2791 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,0,0,u,u,1,1>
2792 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm4
2793 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm1
2794 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,u,0,u,u,u,1,u>
2795 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm2
2796 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm7
2797 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm2[0,1,2],ymm4[3],ymm2[4,5,6],ymm4[7]
2798 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,1,1,1,1,u,u>
2799 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm0, %ymm6
2800 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm3
2801 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <0,u,1,u,1,u,u,u>
2802 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm14, %ymm5
2803 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
2804 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
2805 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2806 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %xmm9
2807 ; AVX2-FAST-NEXT: vmovdqa 32(%r10), %xmm8
2808 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm13
2809 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm10
2810 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
2811 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm0
2812 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm10[4],xmm13[4],xmm10[5],xmm13[5],xmm10[6],xmm13[6],xmm10[7],xmm13[7]
2813 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm7, %ymm11
2814 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm11[0,1,2],ymm0[3],ymm11[4,5,6],ymm0[7]
2815 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm11
2816 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm5
2817 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm7
2818 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm6
2819 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm11[4],xmm5[5],xmm11[5],xmm5[6],xmm11[6],xmm5[7],xmm11[7]
2820 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
2821 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
2822 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm14, %ymm14
2823 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
2824 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm12[2,3],ymm3[4,5],ymm12[6,7]
2825 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2826 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <2,2,2,2,u,u,3,3>
2827 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm3
2828 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm12, %ymm2
2829 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7]
2830 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <2,2,3,3,3,3,u,u>
2831 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm15, %ymm1
2832 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm15, %ymm0
2833 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
2834 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
2835 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2836 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2837 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm10[0],xmm13[0],xmm10[1],xmm13[1],xmm10[2],xmm13[2],xmm10[3],xmm13[3]
2838 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <0,0,0,0,u,u,1,1>
2839 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm2
2840 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <0,u,0,u,u,u,1,u>
2841 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm10, %ymm3
2842 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
2843 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm11[0],xmm5[1],xmm11[1],xmm5[2],xmm11[2],xmm5[3],xmm11[3]
2844 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2845 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <0,0,1,1,1,1,u,u>
2846 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm11, %ymm5
2847 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <0,u,1,u,1,u,u,u>
2848 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm7, %ymm6
2849 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
2850 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
2851 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2852 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm12, %ymm0
2853 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm12, %ymm1
2854 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
2855 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm15, %ymm1
2856 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm15, %ymm2
2857 ; AVX2-FAST-NEXT: vmovdqa %ymm15, %ymm9
2858 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
2859 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2860 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2861 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2862 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2863 ; AVX2-FAST-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
2864 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
2865 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm2 # 16-byte Folded Reload
2866 ; AVX2-FAST-NEXT: # xmm2 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2867 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm1
2868 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm10, %ymm3
2869 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
2870 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2871 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
2872 ; AVX2-FAST-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
2873 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2874 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
2875 ; AVX2-FAST-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
2876 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm14
2877 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm11, %ymm5
2878 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm7, %ymm6
2879 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
2880 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm15
2881 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
2882 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2883 ; AVX2-FAST-NEXT: vmovdqa 32(%r10), %ymm8
2884 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm12, %ymm5
2885 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm12, %ymm2
2886 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %ymm7
2887 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5,6],ymm5[7]
2888 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm9, %ymm3
2889 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm9, %ymm4
2890 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4],ymm3[5],ymm4[6,7]
2891 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
2892 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2893 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
2894 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,0,4,4,4,4,6,5]
2895 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm3
2896 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm11 = ymm14[4],ymm15[4],ymm14[5],ymm15[5],ymm14[6],ymm15[6],ymm14[7],ymm15[7],ymm14[12],ymm15[12],ymm14[13],ymm15[13],ymm14[14],ymm15[14],ymm14[15],ymm15[15]
2897 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,1,4,5,4,5,5,7]
2898 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm0, %ymm4
2899 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
2900 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm9
2901 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm6
2902 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm3
2903 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm1
2904 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm13 = ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15]
2905 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [4,4,2,1,6,5,6,5]
2906 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm0, %ymm12
2907 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[12],ymm6[12],ymm9[13],ymm6[13],ymm9[14],ymm6[14],ymm9[15],ymm6[15]
2908 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [4,5,1,3,5,7,5,7]
2909 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm4, %ymm10
2910 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm12[1],ymm10[2,3,4],ymm12[5],ymm10[6,7]
2911 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm10[0,1],ymm5[2,3],ymm10[4,5],ymm5[6,7]
2912 ; AVX2-FAST-NEXT: vmovdqu %ymm5, (%rsp) # 32-byte Spill
2913 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,2,4,6,4,6,6,7]
2914 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm4, %ymm2
2915 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [2,1,6,5,6,5,7,7]
2916 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm4, %ymm10
2917 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2],ymm2[3],ymm10[4,5,6],ymm2[7]
2918 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [4,6,2,3,6,7,6,7]
2919 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm2, %ymm11
2920 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm5
2921 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [6,5,3,3,7,7,7,7]
2922 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2923 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm13
2924 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm11[1],ymm0[2,3,4],ymm11[5],ymm0[6,7]
2925 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm10[2,3],ymm0[4,5],ymm10[6,7]
2926 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2927 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
2928 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm14[0],ymm15[0],ymm14[1],ymm15[1],ymm14[2],ymm15[2],ymm14[3],ymm15[3],ymm14[8],ymm15[8],ymm14[9],ymm15[9],ymm14[10],ymm15[10],ymm14[11],ymm15[11]
2929 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,4,4,4,4,6,5]
2930 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm8
2931 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,4,5,4,5,5,7]
2932 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm12, %ymm10
2933 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2],ymm8[3],ymm10[4,5,6],ymm8[7]
2934 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
2935 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[8],ymm6[8],ymm9[9],ymm6[9],ymm9[10],ymm6[10],ymm9[11],ymm6[11]
2936 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [4,4,2,1,6,5,6,5]
2937 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm14, %ymm4
2938 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [4,5,1,3,5,7,5,7]
2939 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm15, %ymm6
2940 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3,4],ymm4[5],ymm6[6,7]
2941 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm8[2,3],ymm4[4,5],ymm8[6,7]
2942 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2943 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,2,4,6,4,6,6,7]
2944 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm4, %ymm0
2945 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [2,1,6,5,6,5,7,7]
2946 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm4, %ymm4
2947 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
2948 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm5, %ymm1
2949 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm13, %ymm3
2950 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3,4],ymm1[5],ymm3[6,7]
2951 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
2952 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2953 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm13
2954 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm11
2955 ; AVX2-FAST-NEXT: vmovdqa (%r10), %ymm9
2956 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm5
2957 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm9[4],ymm5[4],ymm9[5],ymm5[5],ymm9[6],ymm5[6],ymm9[7],ymm5[7],ymm9[12],ymm5[12],ymm9[13],ymm5[13],ymm9[14],ymm5[14],ymm9[15],ymm5[15]
2958 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm2, %ymm7
2959 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm13[4],ymm11[4],ymm13[5],ymm11[5],ymm13[6],ymm11[6],ymm13[7],ymm11[7],ymm13[12],ymm11[12],ymm13[13],ymm11[13],ymm13[14],ymm11[14],ymm13[15],ymm11[15]
2960 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm12, %ymm10
2961 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm10[0,1,2],ymm7[3],ymm10[4,5,6],ymm7[7]
2962 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm10
2963 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm7
2964 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm4
2965 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm3
2966 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
2967 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm14, %ymm2
2968 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm10[4],ymm7[4],ymm10[5],ymm7[5],ymm10[6],ymm7[6],ymm10[7],ymm7[7],ymm10[12],ymm7[12],ymm10[13],ymm7[13],ymm10[14],ymm7[14],ymm10[15],ymm7[15]
2969 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm15, %ymm14
2970 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm14[0],ymm2[1],ymm14[2,3,4],ymm2[5],ymm14[6,7]
2971 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm12[2,3],ymm2[4,5],ymm12[6,7]
2972 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,2,4,6,4,6,6,7]
2973 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm12, %ymm6
2974 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [2,1,6,5,6,5,7,7]
2975 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm14, %ymm8
2976 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
2977 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [4,6,2,3,6,7,6,7]
2978 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm15, %ymm1
2979 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [6,5,3,3,7,7,7,7]
2980 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm0
2981 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
2982 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm6[2,3],ymm0[4,5],ymm6[6,7]
2983 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm9[0],ymm5[0],ymm9[1],ymm5[1],ymm9[2],ymm5[2],ymm9[3],ymm5[3],ymm9[8],ymm5[8],ymm9[9],ymm5[9],ymm9[10],ymm5[10],ymm9[11],ymm5[11]
2984 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm13[0],ymm11[0],ymm13[1],ymm11[1],ymm13[2],ymm11[2],ymm13[3],ymm11[3],ymm13[8],ymm11[8],ymm13[9],ymm11[9],ymm13[10],ymm11[10],ymm13[11],ymm11[11]
2985 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,0,4,4,4,4,6,5]
2986 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm6, %ymm6
2987 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
2988 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm8, %ymm8
2989 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
2990 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
2991 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm10[0],ymm7[0],ymm10[1],ymm7[1],ymm10[2],ymm7[2],ymm10[3],ymm7[3],ymm10[8],ymm7[8],ymm10[9],ymm7[9],ymm10[10],ymm7[10],ymm10[11],ymm7[11]
2992 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [4,4,2,1,6,5,6,5]
2993 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm7, %ymm7
2994 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [4,5,1,3,5,7,5,7]
2995 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm8, %ymm8
2996 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3,4],ymm7[5],ymm8[6,7]
2997 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1],ymm6[2,3],ymm7[4,5],ymm6[6,7]
2998 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm12, %ymm1
2999 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm14, %ymm5
3000 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0,1,2],ymm1[3],ymm5[4,5,6],ymm1[7]
3001 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm15, %ymm3
3002 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [6,5,3,3,7,7,7,7]
3003 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm5, %ymm4
3004 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4],ymm3[5],ymm4[6,7]
3005 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
3006 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3007 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 160(%rax)
3008 ; AVX2-FAST-NEXT: vmovdqa %ymm6, 128(%rax)
3009 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 224(%rax)
3010 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 192(%rax)
3011 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3012 ; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
3013 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3014 ; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
3015 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3016 ; AVX2-FAST-NEXT: vmovaps %ymm0, 480(%rax)
3017 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
3018 ; AVX2-FAST-NEXT: vmovaps %ymm0, 448(%rax)
3019 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3020 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%rax)
3021 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3022 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
3023 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3024 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%rax)
3025 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3026 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
3027 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3028 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
3029 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3030 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
3031 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3032 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
3033 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3034 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
3035 ; AVX2-FAST-NEXT: addq $296, %rsp # imm = 0x128
3036 ; AVX2-FAST-NEXT: vzeroupper
3037 ; AVX2-FAST-NEXT: retq
3039 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride8_vf32:
3040 ; AVX2-FAST-PERLANE: # %bb.0:
3041 ; AVX2-FAST-PERLANE-NEXT: subq $264, %rsp # imm = 0x108
3042 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3043 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
3044 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm1
3045 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
3046 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %xmm11
3047 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm0
3048 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3049 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3050 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,2,3,3]
3051 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3052 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
3053 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3054 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
3055 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3056 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3057 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[2,2,3,3]
3058 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3059 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
3060 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm1
3061 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3062 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm3
3063 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3064 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
3065 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm15[2,2,3,3]
3066 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
3067 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm1
3068 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm13
3070 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
3071 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm14 = xmm1[2,2,3,3]
3072 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
3073 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
3074 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r10), %xmm9
3075 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm14[0,1],ymm5[2,3],ymm14[4,5],ymm5[6,7]
3076 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3077 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm10
3078 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
3079 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
3080 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
3081 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3082 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
3083 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm14
3084 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[0,0,1,1]
3085 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
3086 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
3087 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
3088 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
3089 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3090 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3091 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm11[4],xmm9[5],xmm11[5],xmm9[6],xmm11[6],xmm9[7],xmm11[7]
3092 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
3093 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3094 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm14[4],xmm10[4],xmm14[5],xmm10[5],xmm14[6],xmm10[6],xmm14[7],xmm10[7]
3095 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
3096 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3097 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
3098 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm15
3099 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm8
3100 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm8[4],xmm15[4],xmm8[5],xmm15[5],xmm8[6],xmm15[6],xmm8[7],xmm15[7]
3101 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[0,0,1,1]
3102 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
3103 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm7
3104 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm6
3105 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
3106 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm0[0],zero,xmm0[1],zero
3107 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,1,3]
3108 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0],ymm3[1],ymm12[2,3,4],ymm3[5],ymm12[6,7]
3109 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
3110 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3111 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[2,2,3,3]
3112 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3113 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
3114 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3115 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7]
3116 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
3117 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
3118 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
3119 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
3120 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
3121 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
3122 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3123 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm11[0],xmm9[1],xmm11[1],xmm9[2],xmm11[2],xmm9[3],xmm11[3]
3124 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
3125 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
3126 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3127 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
3128 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3129 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
3130 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3]
3131 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
3132 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
3133 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
3134 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
3135 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
3136 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3,4],ymm4[5],ymm6[6,7]
3137 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
3138 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3139 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
3140 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
3141 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
3142 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3143 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
3144 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,2,3,3]
3145 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
3146 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[2,2,3,3]
3147 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
3148 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
3149 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm14
3150 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3151 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3152 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm15
3153 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3154 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd (%rsp), %xmm0, %xmm1 # 16-byte Folded Reload
3155 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
3156 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,0,1,1]
3157 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
3158 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3159 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
3160 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
3161 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
3162 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3163 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7]
3164 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r10), %ymm5
3165 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3166 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm7 # 16-byte Folded Reload
3167 ; AVX2-FAST-PERLANE-NEXT: # xmm7 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
3168 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm7[0,0,1,1]
3169 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
3170 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm8 # 16-byte Folded Reload
3171 ; AVX2-FAST-PERLANE-NEXT: # xmm8 = xmm13[4],mem[4],xmm13[5],mem[5],xmm13[6],mem[6],xmm13[7],mem[7]
3172 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm8[0],zero,xmm8[1],zero
3173 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
3174 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2,3,4],ymm6[5],ymm9[6,7]
3175 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %ymm10
3176 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1],ymm3[2,3],ymm6[4,5],ymm3[6,7]
3177 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3178 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
3179 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3180 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
3181 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3182 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
3183 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[2,2,3,3]
3184 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
3185 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[2,2,3,3]
3186 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
3187 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7]
3188 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm1[2,3],ymm2[4,5],ymm1[6,7]
3189 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
3190 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm5[4],ymm10[4],ymm5[5],ymm10[5],ymm5[6],ymm10[6],ymm5[7],ymm10[7],ymm5[12],ymm10[12],ymm5[13],ymm10[13],ymm5[14],ymm10[14],ymm5[15],ymm10[15]
3191 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[0,0,2,1,4,4,6,5]
3192 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
3193 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm14[4],ymm15[4],ymm14[5],ymm15[5],ymm14[6],ymm15[6],ymm14[7],ymm15[7],ymm14[12],ymm15[12],ymm14[13],ymm15[13],ymm14[14],ymm15[14],ymm14[15],ymm15[15]
3194 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm3[0,1,1,3,4,5,5,7]
3195 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
3196 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2],ymm2[3],ymm8[4,5,6],ymm2[7]
3197 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm11
3198 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm12
3199 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm9 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
3200 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm9[0,0,2,1,4,4,6,5]
3201 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
3202 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm13
3203 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm2
3204 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm13[4],ymm2[4],ymm13[5],ymm2[5],ymm13[6],ymm2[6],ymm13[7],ymm2[7],ymm13[12],ymm2[12],ymm13[13],ymm2[13],ymm13[14],ymm2[14],ymm13[15],ymm2[15]
3205 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm0[0,1,1,3,4,5,5,7]
3206 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
3207 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0],ymm8[1],ymm6[2,3,4],ymm8[5],ymm6[6,7]
3208 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5],ymm4[6,7]
3209 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3210 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
3211 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
3212 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
3213 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
3214 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
3215 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[0,2,2,3,4,6,6,7]
3216 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
3217 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
3218 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
3219 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
3220 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3221 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3222 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm5[0],ymm10[0],ymm5[1],ymm10[1],ymm5[2],ymm10[2],ymm5[3],ymm10[3],ymm5[8],ymm10[8],ymm5[9],ymm10[9],ymm5[10],ymm10[10],ymm5[11],ymm10[11]
3223 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm14[0],ymm15[0],ymm14[1],ymm15[1],ymm14[2],ymm15[2],ymm14[3],ymm15[3],ymm14[8],ymm15[8],ymm14[9],ymm15[9],ymm14[10],ymm15[10],ymm14[11],ymm15[11]
3224 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm0[0,0,2,1,4,4,6,5]
3225 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
3226 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm1[0,1,1,3,4,5,5,7]
3227 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
3228 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
3229 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11]
3230 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm13[0],ymm2[0],ymm13[1],ymm2[1],ymm13[2],ymm2[2],ymm13[3],ymm2[3],ymm13[8],ymm2[8],ymm13[9],ymm2[9],ymm13[10],ymm2[10],ymm13[11],ymm2[11]
3231 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,0,2,1,4,4,6,5]
3232 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
3233 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm2[0,1,1,3,4,5,5,7]
3234 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
3235 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
3236 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3],ymm5[4,5],ymm3[6,7]
3237 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3238 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %ymm12
3239 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
3240 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
3241 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
3242 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
3243 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
3244 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm13
3245 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[0,2,2,3,4,6,6,7]
3246 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
3247 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
3248 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
3249 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
3250 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
3251 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15]
3252 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm1[0,0,2,1,4,4,6,5]
3253 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
3254 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm8
3255 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm7
3256 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
3257 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,1,1,3,4,5,5,7]
3258 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
3259 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2],ymm0[3],ymm5[4,5,6],ymm0[7]
3260 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm9
3261 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm6
3262 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm14 = ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[12],ymm6[12],ymm9[13],ymm6[13],ymm9[14],ymm6[14],ymm9[15],ymm6[15]
3263 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm14[0,0,2,1,4,4,6,5]
3264 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
3265 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm3
3266 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm2
3267 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
3268 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm10 = ymm0[0,1,1,3,4,5,5,7]
3269 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
3270 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm15[1],ymm10[2,3,4],ymm15[5],ymm10[6,7]
3271 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm15 = ymm10[0,1],ymm5[2,3],ymm10[4,5],ymm5[6,7]
3272 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
3273 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
3274 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
3275 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
3276 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2],ymm1[3],ymm4[4,5,6],ymm1[7]
3277 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[0,2,2,3,4,6,6,7]
3278 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
3279 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
3280 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
3281 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
3282 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
3283 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11]
3284 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
3285 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[0,0,2,1,4,4,6,5]
3286 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
3287 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm4[0,1,1,3,4,5,5,7]
3288 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
3289 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3],ymm7[4,5,6],ymm5[7]
3290 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[8],ymm6[8],ymm9[9],ymm6[9],ymm9[10],ymm6[10],ymm9[11],ymm6[11]
3291 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
3292 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
3293 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
3294 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm2[0,1,1,3,4,5,5,7]
3295 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
3296 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0],ymm3[1],ymm7[2,3,4],ymm3[5],ymm7[6,7]
3297 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
3298 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
3299 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
3300 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
3301 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
3302 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
3303 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[0,2,2,3,4,6,6,7]
3304 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
3305 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
3306 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
3307 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3,4],ymm4[5],ymm2[6,7]
3308 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
3309 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3310 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 160(%rax)
3311 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 128(%rax)
3312 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 224(%rax)
3313 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 192(%rax)
3314 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, 416(%rax)
3315 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3316 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
3317 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3318 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 480(%rax)
3319 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3320 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 448(%rax)
3321 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
3322 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%rax)
3323 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3324 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
3325 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3326 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
3327 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3328 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
3329 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3330 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
3331 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3332 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
3333 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3334 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
3335 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3336 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
3337 ; AVX2-FAST-PERLANE-NEXT: addq $264, %rsp # imm = 0x108
3338 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
3339 ; AVX2-FAST-PERLANE-NEXT: retq
3341 ; AVX512F-SLOW-LABEL: store_i16_stride8_vf32:
3342 ; AVX512F-SLOW: # %bb.0:
3343 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3344 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3345 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %xmm1
3346 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3347 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %xmm0
3348 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3349 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
3350 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm1
3351 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3352 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm5
3353 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm1[4],xmm5[5],xmm1[5],xmm5[6],xmm1[6],xmm5[7],xmm1[7]
3354 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm26 = <0,u,0,u,u,u,1,u,2,2,2,2,u,u,3,3>
3355 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm26, %zmm30
3356 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = <0,0,0,0,u,u,1,1,2,2,2,2,u,u,3,3>
3357 ; AVX512F-SLOW-NEXT: movw $-30584, %r11w # imm = 0x8888
3358 ; AVX512F-SLOW-NEXT: kmovw %r11d, %k1
3359 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm27, %zmm30 {%k1}
3360 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm8
3361 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm9
3362 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
3363 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm10
3364 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm11
3365 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
3366 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = <0,u,1,u,1,u,u,u,2,2,3,3,3,3,u,u>
3367 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm28, %zmm3
3368 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <0,0,1,1,1,1,u,u,2,2,3,3,3,3,u,u>
3369 ; AVX512F-SLOW-NEXT: movw $8738, %r11w # imm = 0x2222
3370 ; AVX512F-SLOW-NEXT: kmovw %r11d, %k2
3371 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm29, %zmm3 {%k2}
3372 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r10), %ymm15
3373 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %ymm2
3374 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm2[0],ymm15[0],ymm2[1],ymm15[1],ymm2[2],ymm15[2],ymm2[3],ymm15[3],ymm2[8],ymm15[8],ymm2[9],ymm15[9],ymm2[10],ymm15[10],ymm2[11],ymm15[11]
3375 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %ymm7
3376 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %ymm12
3377 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm12[0],ymm7[0],ymm12[1],ymm7[1],ymm12[2],ymm7[2],ymm12[3],ymm7[3],ymm12[8],ymm7[8],ymm12[9],ymm7[9],ymm12[10],ymm7[10],ymm12[11],ymm7[11]
3378 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = [0,1,4,5,4,5,5,7,2,1,6,5,6,5,7,7]
3379 ; AVX512F-SLOW-NEXT: vpermd %zmm13, %zmm19, %zmm31
3380 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm18 = [0,0,4,4,4,4,6,5,0,2,4,6,4,6,6,7]
3381 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm18, %zmm31 {%k1}
3382 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %ymm13
3383 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %ymm1
3384 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %ymm0
3385 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
3386 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm20 = [4,5,1,3,5,7,5,7,6,5,3,3,7,7,7,7]
3387 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm20, %zmm14
3388 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %ymm6
3389 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm21 = [4,4,2,1,6,5,6,5,4,6,2,3,6,7,6,7]
3390 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm6[0],ymm13[0],ymm6[1],ymm13[1],ymm6[2],ymm13[2],ymm6[3],ymm13[3],ymm6[8],ymm13[8],ymm6[9],ymm13[9],ymm6[10],ymm13[10],ymm6[11],ymm13[11]
3391 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm21, %zmm14 {%k2}
3392 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm2[4],ymm15[4],ymm2[5],ymm15[5],ymm2[6],ymm15[6],ymm2[7],ymm15[7],ymm2[12],ymm15[12],ymm2[13],ymm15[13],ymm2[14],ymm15[14],ymm2[15],ymm15[15]
3393 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r10), %xmm2
3394 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm12 = ymm12[4],ymm7[4],ymm12[5],ymm7[5],ymm12[6],ymm7[6],ymm12[7],ymm7[7],ymm12[12],ymm7[12],ymm12[13],ymm7[13],ymm12[14],ymm7[14],ymm12[15],ymm7[15]
3395 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %xmm7
3396 ; AVX512F-SLOW-NEXT: vpermd %zmm12, %zmm19, %zmm17
3397 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %xmm12
3398 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm18, %zmm17 {%k1}
3399 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %xmm15
3400 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm6[4],ymm13[4],ymm6[5],ymm13[5],ymm6[6],ymm13[6],ymm6[7],ymm13[7],ymm6[12],ymm13[12],ymm6[13],ymm13[13],ymm6[14],ymm13[14],ymm6[15],ymm13[15]
3401 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %xmm13
3402 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15]
3403 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %xmm1
3404 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm20, %zmm16
3405 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %xmm0
3406 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm21, %zmm16 {%k2}
3407 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3]
3408 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm26, %zmm23
3409 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
3410 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm27, %zmm23 {%k1}
3411 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3412 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm28, %zmm22
3413 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %xmm4
3414 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm4[0],xmm13[0],xmm4[1],xmm13[1],xmm4[2],xmm13[2],xmm4[3],xmm13[3]
3415 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm29, %zmm22 {%k2}
3416 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
3417 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm15[4],xmm12[4],xmm15[5],xmm12[5],xmm15[6],xmm12[6],xmm15[7],xmm12[7]
3418 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm26, %zmm25
3419 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm27, %zmm25 {%k1}
3420 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm13[4],xmm4[5],xmm13[5],xmm4[6],xmm13[6],xmm4[7],xmm13[7]
3421 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
3422 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm28, %zmm24
3423 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm29, %zmm24 {%k2}
3424 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %ymm0
3425 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %ymm1
3426 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm2
3427 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm4
3428 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm2[0],ymm4[1],ymm2[1],ymm4[2],ymm2[2],ymm4[3],ymm2[3],ymm4[8],ymm2[8],ymm4[9],ymm2[9],ymm4[10],ymm2[10],ymm4[11],ymm2[11]
3429 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm19, %zmm6
3430 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3431 ; AVX512F-SLOW-NEXT: vpermd %zmm7, %zmm18, %zmm6 {%k1}
3432 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm7
3433 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
3434 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm1
3435 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm2[4],ymm4[5],ymm2[5],ymm4[6],ymm2[6],ymm4[7],ymm2[7],ymm4[12],ymm2[12],ymm4[13],ymm2[13],ymm4[14],ymm2[14],ymm4[15],ymm2[15]
3436 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm4
3437 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm19, %zmm2
3438 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm12
3439 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm18, %zmm2 {%k1}
3440 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm12[0],ymm4[0],ymm12[1],ymm4[1],ymm12[2],ymm4[2],ymm12[3],ymm4[3],ymm12[8],ymm4[8],ymm12[9],ymm4[9],ymm12[10],ymm4[10],ymm12[11],ymm4[11]
3441 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm20, %zmm0
3442 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm1[0],ymm7[0],ymm1[1],ymm7[1],ymm1[2],ymm7[2],ymm1[3],ymm7[3],ymm1[8],ymm7[8],ymm1[9],ymm7[9],ymm1[10],ymm7[10],ymm1[11],ymm7[11]
3443 ; AVX512F-SLOW-NEXT: vpermd %zmm13, %zmm21, %zmm0 {%k2}
3444 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm7[4],ymm1[5],ymm7[5],ymm1[6],ymm7[6],ymm1[7],ymm7[7],ymm1[12],ymm7[12],ymm1[13],ymm7[13],ymm1[14],ymm7[14],ymm1[15],ymm7[15]
3445 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm12[4],ymm4[4],ymm12[5],ymm4[5],ymm12[6],ymm4[6],ymm12[7],ymm4[7],ymm12[12],ymm4[12],ymm12[13],ymm4[13],ymm12[14],ymm4[14],ymm12[15],ymm4[15]
3446 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm20, %zmm4
3447 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm21, %zmm4 {%k2}
3448 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3449 ; AVX512F-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
3450 ; AVX512F-SLOW-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
3451 ; AVX512F-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
3452 ; AVX512F-SLOW-NEXT: # xmm5 = xmm5[0],mem[0],xmm5[1],mem[1],xmm5[2],mem[2],xmm5[3],mem[3]
3453 ; AVX512F-SLOW-NEXT: vpermd %zmm5, %zmm26, %zmm5
3454 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm27, %zmm5 {%k1}
3455 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
3456 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
3457 ; AVX512F-SLOW-NEXT: vpermd %zmm7, %zmm28, %zmm7
3458 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm29, %zmm7 {%k2}
3459 ; AVX512F-SLOW-NEXT: movb $-86, %al
3460 ; AVX512F-SLOW-NEXT: kmovw %eax, %k1
3461 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm30, %zmm3 {%k1}
3462 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm31, %zmm14 {%k1}
3463 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm17, %zmm16 {%k1}
3464 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm23, %zmm22 {%k1}
3465 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm25, %zmm24 {%k1}
3466 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm6, %zmm0 {%k1}
3467 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm2, %zmm4 {%k1}
3468 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm5, %zmm7 {%k1}
3469 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3470 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm7, (%rax)
3471 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm4, 192(%rax)
3472 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
3473 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm24, 320(%rax)
3474 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm22, 256(%rax)
3475 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm16, 448(%rax)
3476 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm14, 384(%rax)
3477 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rax)
3478 ; AVX512F-SLOW-NEXT: vzeroupper
3479 ; AVX512F-SLOW-NEXT: retq
3481 ; AVX512F-FAST-LABEL: store_i16_stride8_vf32:
3482 ; AVX512F-FAST: # %bb.0:
3483 ; AVX512F-FAST-NEXT: subq $472, %rsp # imm = 0x1D8
3484 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3485 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
3486 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %xmm1
3487 ; AVX512F-FAST-NEXT: vmovdqa (%rax), %xmm2
3488 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3489 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm26
3490 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm1, %xmm27
3491 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3492 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3493 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm0
3494 ; AVX512F-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3495 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm2
3496 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
3497 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm31
3498 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3499 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3500 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm1
3501 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm2
3502 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3503 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm20
3504 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm1, %xmm21
3505 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3506 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3507 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm1
3508 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm2
3509 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3510 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm18
3511 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm1, %xmm19
3512 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3513 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3514 ; AVX512F-FAST-NEXT: vmovdqa 32(%r10), %ymm0
3515 ; AVX512F-FAST-NEXT: vmovdqa 32(%rax), %ymm1
3516 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3517 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm24
3518 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %ymm2
3519 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %ymm4
3520 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm2[0],ymm4[1],ymm2[1],ymm4[2],ymm2[2],ymm4[3],ymm2[3],ymm4[8],ymm2[8],ymm4[9],ymm2[9],ymm4[10],ymm2[10],ymm4[11],ymm2[11]
3521 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm25
3522 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %ymm5
3523 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %ymm6
3524 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
3525 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm22
3526 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %ymm7
3527 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %ymm8
3528 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
3529 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm23
3530 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
3531 ; AVX512F-FAST-NEXT: vmovdqa 32(%r10), %xmm0
3532 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm16
3533 ; AVX512F-FAST-NEXT: vmovdqa 32(%rax), %xmm3
3534 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm2[4],ymm4[5],ymm2[5],ymm4[6],ymm2[6],ymm4[7],ymm2[7],ymm4[12],ymm2[12],ymm4[13],ymm2[13],ymm4[14],ymm2[14],ymm4[15],ymm2[15]
3535 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %xmm1
3536 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm17
3537 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %xmm4
3538 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
3539 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3540 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm2
3541 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
3542 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3543 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
3544 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
3545 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3546 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
3547 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
3548 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3549 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm0
3550 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
3551 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
3552 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3553 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm0
3554 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm1
3555 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
3556 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3557 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %ymm0
3558 ; AVX512F-FAST-NEXT: vmovdqa (%rax), %ymm1
3559 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3560 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm10 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
3561 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm0
3562 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %ymm1
3563 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3564 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm7 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
3565 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm0
3566 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm1
3567 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3568 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
3569 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm0
3570 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm2
3571 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[8],ymm0[8],ymm2[9],ymm0[9],ymm2[10],ymm0[10],ymm2[11],ymm0[11]
3572 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[12],ymm0[12],ymm2[13],ymm0[13],ymm2[14],ymm0[14],ymm2[15],ymm0[15]
3573 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm26, %xmm0
3574 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm27, %xmm1
3575 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3576 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3577 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm26
3578 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
3579 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm27
3580 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm30
3581 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm15, %zmm15, %zmm15
3582 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm9, %zmm28
3583 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm8, %zmm8, %zmm29
3584 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm31, %xmm0
3585 ; AVX512F-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
3586 ; AVX512F-FAST-NEXT: # xmm9 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
3587 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm0
3588 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm21, %xmm8
3589 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
3590 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm18, %xmm0
3591 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm19, %xmm11
3592 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm0[0],xmm11[0],xmm0[1],xmm11[1],xmm0[2],xmm11[2],xmm0[3],xmm11[3]
3593 ; AVX512F-FAST-NEXT: movw $-30584, %ax # imm = 0x8888
3594 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
3595 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,1,4,5,4,5,5,7,10,9,14,13,14,13,15,15]
3596 ; AVX512F-FAST-NEXT: vpermd %zmm25, %zmm31, %zmm25
3597 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,0,4,4,4,4,6,5,8,10,12,14,12,14,14,15]
3598 ; AVX512F-FAST-NEXT: vpermd %zmm24, %zmm0, %zmm25 {%k1}
3599 ; AVX512F-FAST-NEXT: vpermd %zmm17, %zmm31, %zmm17
3600 ; AVX512F-FAST-NEXT: vpermd %zmm16, %zmm0, %zmm17 {%k1}
3601 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
3602 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm13, %zmm13, %zmm16
3603 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
3604 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm13, %zmm13, %zmm24
3605 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
3606 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm13, %zmm13, %zmm13
3607 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm14, %zmm14, %zmm14
3608 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm12, %zmm12
3609 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm6, %zmm6
3610 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm5
3611 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm3
3612 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm10, %zmm10
3613 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm7, %zmm7
3614 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm4
3615 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm2
3616 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
3617 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm9, %zmm9
3618 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm8, %zmm8, %zmm8
3619 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm11
3620 ; AVX512F-FAST-NEXT: vpermd %zmm6, %zmm31, %zmm6
3621 ; AVX512F-FAST-NEXT: vpermd %zmm12, %zmm0, %zmm6 {%k1}
3622 ; AVX512F-FAST-NEXT: vpermd %zmm7, %zmm31, %zmm7
3623 ; AVX512F-FAST-NEXT: movw $8738, %ax # imm = 0x2222
3624 ; AVX512F-FAST-NEXT: kmovw %eax, %k2
3625 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm0, %zmm7 {%k1}
3626 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,5,1,3,5,7,5,7,14,13,11,11,15,15,15,15]
3627 ; AVX512F-FAST-NEXT: vpermd %zmm23, %zmm0, %zmm10
3628 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm12 = [4,4,2,1,6,5,6,5,12,14,10,11,14,15,14,15]
3629 ; AVX512F-FAST-NEXT: vpermd %zmm22, %zmm12, %zmm10 {%k2}
3630 ; AVX512F-FAST-NEXT: vpermd %zmm27, %zmm0, %zmm22
3631 ; AVX512F-FAST-NEXT: vpermd %zmm26, %zmm12, %zmm22 {%k2}
3632 ; AVX512F-FAST-NEXT: vpermd %zmm3, %zmm0, %zmm3
3633 ; AVX512F-FAST-NEXT: vpermd %zmm5, %zmm12, %zmm3 {%k2}
3634 ; AVX512F-FAST-NEXT: vpermd %zmm2, %zmm0, %zmm0
3635 ; AVX512F-FAST-NEXT: vpermd %zmm4, %zmm12, %zmm0 {%k2}
3636 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,u,0,u,u,u,1,u,10,10,10,10,u,u,11,11>
3637 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm4 # 64-byte Folded Reload
3638 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,0,0,0,u,u,1,1,10,10,10,10,u,u,11,11>
3639 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm4 {%k1} # 64-byte Folded Reload
3640 ; AVX512F-FAST-NEXT: vpermd %zmm15, %zmm2, %zmm12
3641 ; AVX512F-FAST-NEXT: vpermd %zmm30, %zmm5, %zmm12 {%k1}
3642 ; AVX512F-FAST-NEXT: vpermd %zmm24, %zmm2, %zmm15
3643 ; AVX512F-FAST-NEXT: vpermd %zmm16, %zmm5, %zmm15 {%k1}
3644 ; AVX512F-FAST-NEXT: vpermd %zmm9, %zmm2, %zmm2
3645 ; AVX512F-FAST-NEXT: vpermd %zmm1, %zmm5, %zmm2 {%k1}
3646 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,u,1,u,1,u,u,u,10,u,11,u,11,u,u,u>
3647 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
3648 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,0,1,1,1,1,u,u,10,10,11,11,11,11,u,u>
3649 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm1 {%k2} # 64-byte Folded Reload
3650 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <0,u,1,u,1,u,u,u,10,10,11,11,11,11,u,u>
3651 ; AVX512F-FAST-NEXT: vpermd %zmm29, %zmm9, %zmm16
3652 ; AVX512F-FAST-NEXT: vpermd %zmm28, %zmm5, %zmm16 {%k2}
3653 ; AVX512F-FAST-NEXT: vpermd %zmm14, %zmm9, %zmm14
3654 ; AVX512F-FAST-NEXT: vpermd %zmm13, %zmm5, %zmm14 {%k2}
3655 ; AVX512F-FAST-NEXT: vpermd %zmm11, %zmm9, %zmm9
3656 ; AVX512F-FAST-NEXT: vpermd %zmm8, %zmm5, %zmm9 {%k2}
3657 ; AVX512F-FAST-NEXT: movb $-86, %al
3658 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
3659 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm4, %zmm1 {%k1}
3660 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm25, %zmm10 {%k1}
3661 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm17, %zmm22 {%k1}
3662 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm12, %zmm16 {%k1}
3663 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm15, %zmm14 {%k1}
3664 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, %zmm3 {%k1}
3665 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm7, %zmm0 {%k1}
3666 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, %zmm9 {%k1}
3667 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3668 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm9, (%rax)
3669 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, 192(%rax)
3670 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm3, 128(%rax)
3671 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm14, 320(%rax)
3672 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm16, 256(%rax)
3673 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm22, 448(%rax)
3674 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm10, 384(%rax)
3675 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 64(%rax)
3676 ; AVX512F-FAST-NEXT: addq $472, %rsp # imm = 0x1D8
3677 ; AVX512F-FAST-NEXT: vzeroupper
3678 ; AVX512F-FAST-NEXT: retq
3680 ; AVX512BW-LABEL: store_i16_stride8_vf32:
3681 ; AVX512BW: # %bb.0:
3682 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3683 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3684 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
3685 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
3686 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm1
3687 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
3688 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm3
3689 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm5
3690 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm6
3691 ; AVX512BW-NEXT: vmovdqa64 (%r11), %zmm7
3692 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm8
3693 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <u,u,u,u,u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35>
3694 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm4
3695 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u>
3696 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm9
3697 ; AVX512BW-NEXT: movw $-30584, %cx # imm = 0x8888
3698 ; AVX512BW-NEXT: kmovd %ecx, %k1
3699 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm9 {%k1}
3700 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u,u,u>
3701 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm10
3702 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u,u,u,u,u>
3703 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
3704 ; AVX512BW-NEXT: movw $8738, %cx # imm = 0x2222
3705 ; AVX512BW-NEXT: kmovd %ecx, %k2
3706 ; AVX512BW-NEXT: vmovdqa32 %zmm10, %zmm4 {%k2}
3707 ; AVX512BW-NEXT: movb $-86, %cl
3708 ; AVX512BW-NEXT: kmovd %ecx, %k3
3709 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm4 {%k3}
3710 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39>
3711 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm9
3712 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u>
3713 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm10
3714 ; AVX512BW-NEXT: vmovdqa32 %zmm9, %zmm10 {%k1}
3715 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u,u,u>
3716 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm11
3717 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u,u,u,u,u>
3718 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm9
3719 ; AVX512BW-NEXT: vmovdqa32 %zmm11, %zmm9 {%k2}
3720 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm9 {%k3}
3721 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43>
3722 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm10
3723 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u>
3724 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm11
3725 ; AVX512BW-NEXT: vmovdqa32 %zmm10, %zmm11 {%k1}
3726 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u,u,u>
3727 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm12
3728 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u,u,u,u,u>
3729 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm10
3730 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm10 {%k2}
3731 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm10 {%k3}
3732 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47>
3733 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm11
3734 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u>
3735 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm12
3736 ; AVX512BW-NEXT: vmovdqa32 %zmm11, %zmm12 {%k1}
3737 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u,u,u>
3738 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm13
3739 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u,u,u,u,u>
3740 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm11
3741 ; AVX512BW-NEXT: vmovdqa32 %zmm13, %zmm11 {%k2}
3742 ; AVX512BW-NEXT: vmovdqa64 %zmm12, %zmm11 {%k3}
3743 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51>
3744 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm12
3745 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u>
3746 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm13
3747 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm13 {%k1}
3748 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u,u,u>
3749 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm12
3750 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u,u,u,u,u>
3751 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm14
3752 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm14 {%k2}
3753 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm14 {%k3}
3754 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55>
3755 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm12
3756 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u>
3757 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm13
3758 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm13 {%k1}
3759 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u,u,u>
3760 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm12
3761 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u,u,u,u,u>
3762 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm15
3763 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm15 {%k2}
3764 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm15 {%k3}
3765 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59>
3766 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm12
3767 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u>
3768 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm13
3769 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm13 {%k1}
3770 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u,u,u>
3771 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm12
3772 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm16 = <24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u,u,u,u,u>
3773 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm16
3774 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm16 {%k2}
3775 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm16 {%k3}
3776 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63>
3777 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm7, %zmm12
3778 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u>
3779 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm5, %zmm7
3780 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm7 {%k1}
3781 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u,u,u>
3782 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm5
3783 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u,u,u,u,u>
3784 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
3785 ; AVX512BW-NEXT: vmovdqa32 %zmm5, %zmm2 {%k2}
3786 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm2 {%k3}
3787 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 448(%rax)
3788 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 384(%rax)
3789 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 320(%rax)
3790 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 256(%rax)
3791 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 192(%rax)
3792 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 128(%rax)
3793 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 64(%rax)
3794 ; AVX512BW-NEXT: vmovdqa64 %zmm4, (%rax)
3795 ; AVX512BW-NEXT: vzeroupper
3796 ; AVX512BW-NEXT: retq
3797 %in.vec0 = load <32 x i16>, ptr %in.vecptr0, align 64
3798 %in.vec1 = load <32 x i16>, ptr %in.vecptr1, align 64
3799 %in.vec2 = load <32 x i16>, ptr %in.vecptr2, align 64
3800 %in.vec3 = load <32 x i16>, ptr %in.vecptr3, align 64
3801 %in.vec4 = load <32 x i16>, ptr %in.vecptr4, align 64
3802 %in.vec5 = load <32 x i16>, ptr %in.vecptr5, align 64
3803 %in.vec6 = load <32 x i16>, ptr %in.vecptr6, align 64
3804 %in.vec7 = load <32 x i16>, ptr %in.vecptr7, align 64
3805 %1 = shufflevector <32 x i16> %in.vec0, <32 x i16> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3806 %2 = shufflevector <32 x i16> %in.vec2, <32 x i16> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3807 %3 = shufflevector <32 x i16> %in.vec4, <32 x i16> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3808 %4 = shufflevector <32 x i16> %in.vec6, <32 x i16> %in.vec7, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3809 %5 = shufflevector <64 x i16> %1, <64 x i16> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3810 %6 = shufflevector <64 x i16> %3, <64 x i16> %4, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3811 %7 = shufflevector <128 x i16> %5, <128 x i16> %6, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
3812 %interleaved.vec = shufflevector <256 x i16> %7, <256 x i16> poison, <256 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 224, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 225, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 226, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 227, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 228, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 229, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 230, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 231, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 232, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 233, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 234, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 235, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 236, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 237, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 238, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 239, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 240, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 241, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 242, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 243, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 244, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 245, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 246, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 247, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 248, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 249, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 250, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 251, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 252, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 253, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 254, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223, i32 255>
3813 store <256 x i16> %interleaved.vec, ptr %out.vec, align 64
3817 define void @store_i16_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
3818 ; SSE-LABEL: store_i16_stride8_vf64:
3820 ; SSE-NEXT: subq $776, %rsp # imm = 0x308
3821 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3822 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
3823 ; SSE-NEXT: movdqa (%rdi), %xmm2
3824 ; SSE-NEXT: movdqa (%rsi), %xmm7
3825 ; SSE-NEXT: movdqa (%rdx), %xmm3
3826 ; SSE-NEXT: movdqa (%rcx), %xmm9
3827 ; SSE-NEXT: movdqa (%r8), %xmm4
3828 ; SSE-NEXT: movdqa (%r9), %xmm10
3829 ; SSE-NEXT: movdqa (%r10), %xmm8
3830 ; SSE-NEXT: movdqa (%rax), %xmm11
3831 ; SSE-NEXT: movdqa %xmm3, %xmm1
3832 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
3833 ; SSE-NEXT: movdqa %xmm2, %xmm0
3834 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1],xmm0[2],xmm7[2],xmm0[3],xmm7[3]
3835 ; SSE-NEXT: movdqa %xmm0, %xmm5
3836 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
3837 ; SSE-NEXT: movdqa %xmm8, %xmm12
3838 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
3839 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm12[0,0,0,0]
3840 ; SSE-NEXT: movdqa %xmm4, %xmm6
3841 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm10[0],xmm6[1],xmm10[1],xmm6[2],xmm10[2],xmm6[3],xmm10[3]
3842 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm6[0,0,0,0]
3843 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm13[2],xmm14[3],xmm13[3]
3844 ; SSE-NEXT: movsd {{.*#+}} xmm14 = xmm5[0],xmm14[1]
3845 ; SSE-NEXT: movapd %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3846 ; SSE-NEXT: movdqa %xmm6, %xmm5
3847 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm12[0],xmm5[1],xmm12[1]
3848 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm1[1,1,1,1]
3849 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[1,1,1,1]
3850 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
3851 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm5[2,3]
3852 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3853 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm12[2,2,2,2]
3854 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm6[2,2,2,2]
3855 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm5[2],xmm13[3],xmm5[3]
3856 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[3,3,3,3]
3857 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3858 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm0[0],xmm13[1]
3859 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3860 ; SSE-NEXT: movdqa 16(%r8), %xmm0
3861 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm12[2],xmm6[3],xmm12[3]
3862 ; SSE-NEXT: movdqa 16(%r9), %xmm5
3863 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
3864 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm1[0],xmm14[1],xmm1[1]
3865 ; SSE-NEXT: movdqa 16(%r10), %xmm1
3866 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm6[2,3]
3867 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3868 ; SSE-NEXT: movdqa 16(%rax), %xmm6
3869 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
3870 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
3871 ; SSE-NEXT: movdqa %xmm2, %xmm7
3872 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
3873 ; SSE-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm11[4],xmm8[5],xmm11[5],xmm8[6],xmm11[6],xmm8[7],xmm11[7]
3874 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm8[0,0,0,0]
3875 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
3876 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm4[0,0,0,0]
3877 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm9[2],xmm10[3],xmm9[3]
3878 ; SSE-NEXT: movsd {{.*#+}} xmm10 = xmm7[0],xmm10[1]
3879 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3880 ; SSE-NEXT: movdqa %xmm4, %xmm7
3881 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1]
3882 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm3[1,1,1,1]
3883 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm2[1,1,1,1]
3884 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
3885 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm7[2,3]
3886 ; SSE-NEXT: movaps %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3887 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[2,2,2,2]
3888 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm4[2,2,2,2]
3889 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm7[2],xmm9[3],xmm7[3]
3890 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[3,3,3,3]
3891 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3892 ; SSE-NEXT: movsd {{.*#+}} xmm9 = xmm2[0],xmm9[1]
3893 ; SSE-NEXT: movapd %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3894 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm8[2],xmm4[3],xmm8[3]
3895 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[3,3,3,3]
3896 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
3897 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm4[2,3]
3898 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3899 ; SSE-NEXT: movdqa %xmm1, %xmm7
3900 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3901 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,0,0,0]
3902 ; SSE-NEXT: movdqa %xmm0, %xmm4
3903 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
3904 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,0,0,0]
3905 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3906 ; SSE-NEXT: movdqa %xmm3, %xmm13
3907 ; SSE-NEXT: movdqa 16(%rdx), %xmm2
3908 ; SSE-NEXT: movdqa 16(%rcx), %xmm8
3909 ; SSE-NEXT: movdqa %xmm2, %xmm10
3910 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
3911 ; SSE-NEXT: movdqa 16(%rdi), %xmm3
3912 ; SSE-NEXT: movdqa 16(%rsi), %xmm9
3913 ; SSE-NEXT: movdqa %xmm3, %xmm11
3914 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
3915 ; SSE-NEXT: movdqa %xmm11, %xmm12
3916 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
3917 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
3918 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3919 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
3920 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
3921 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
3922 ; SSE-NEXT: movdqa %xmm4, %xmm12
3923 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1]
3924 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
3925 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3926 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm7[2,2,2,2]
3927 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm4[2,2,2,2]
3928 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
3929 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
3930 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
3931 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
3932 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3933 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm7[2],xmm4[3],xmm7[3]
3934 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[3,3,3,3]
3935 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1]
3936 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm4[2,3]
3937 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3938 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
3939 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
3940 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
3941 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm5[4],xmm0[5],xmm5[5],xmm0[6],xmm5[6],xmm0[7],xmm5[7]
3942 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm1[0,0,0,0]
3943 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,0,0,0]
3944 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3945 ; SSE-NEXT: movdqa %xmm3, %xmm4
3946 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
3947 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
3948 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3949 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
3950 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
3951 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
3952 ; SSE-NEXT: movdqa %xmm0, %xmm4
3953 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
3954 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
3955 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3956 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm1[2,2,2,2]
3957 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[2,2,2,2]
3958 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3959 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,3,3,3]
3960 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3961 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm3[0],xmm5[1]
3962 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3963 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3964 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[3,3,3,3]
3965 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
3966 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm0[2,3]
3967 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3968 ; SSE-NEXT: movdqa 32(%r10), %xmm0
3969 ; SSE-NEXT: movdqa 32(%rax), %xmm4
3970 ; SSE-NEXT: movdqa %xmm0, %xmm6
3971 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
3972 ; SSE-NEXT: movdqa 32(%r8), %xmm1
3973 ; SSE-NEXT: movdqa 32(%r9), %xmm5
3974 ; SSE-NEXT: movdqa %xmm1, %xmm7
3975 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
3976 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
3977 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
3978 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3979 ; SSE-NEXT: movdqa %xmm3, %xmm13
3980 ; SSE-NEXT: movdqa 32(%rdx), %xmm2
3981 ; SSE-NEXT: movdqa 32(%rcx), %xmm8
3982 ; SSE-NEXT: movdqa %xmm2, %xmm10
3983 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
3984 ; SSE-NEXT: movdqa 32(%rdi), %xmm3
3985 ; SSE-NEXT: movdqa 32(%rsi), %xmm9
3986 ; SSE-NEXT: movdqa %xmm3, %xmm11
3987 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
3988 ; SSE-NEXT: movdqa %xmm11, %xmm12
3989 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
3990 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
3991 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3992 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
3993 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
3994 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
3995 ; SSE-NEXT: movdqa %xmm7, %xmm12
3996 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
3997 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
3998 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3999 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
4000 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
4001 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4002 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
4003 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4004 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
4005 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4006 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4007 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
4008 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4009 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
4010 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4011 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
4012 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
4013 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4014 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4015 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
4016 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4017 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4018 ; SSE-NEXT: movdqa %xmm3, %xmm4
4019 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
4020 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
4021 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4022 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
4023 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4024 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4025 ; SSE-NEXT: movdqa %xmm1, %xmm4
4026 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4027 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
4028 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4029 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
4030 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[2,2,2,2]
4031 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4032 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,3,3,3]
4033 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4034 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm3[0],xmm5[1]
4035 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4036 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4037 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4038 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4039 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[2,3]
4040 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4041 ; SSE-NEXT: movdqa 48(%r10), %xmm0
4042 ; SSE-NEXT: movdqa 48(%rax), %xmm4
4043 ; SSE-NEXT: movdqa %xmm0, %xmm6
4044 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4045 ; SSE-NEXT: movdqa 48(%r8), %xmm1
4046 ; SSE-NEXT: movdqa 48(%r9), %xmm5
4047 ; SSE-NEXT: movdqa %xmm1, %xmm7
4048 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4049 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
4050 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
4051 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4052 ; SSE-NEXT: movdqa %xmm3, %xmm13
4053 ; SSE-NEXT: movdqa 48(%rdx), %xmm2
4054 ; SSE-NEXT: movdqa 48(%rcx), %xmm8
4055 ; SSE-NEXT: movdqa %xmm2, %xmm10
4056 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
4057 ; SSE-NEXT: movdqa 48(%rdi), %xmm3
4058 ; SSE-NEXT: movdqa 48(%rsi), %xmm9
4059 ; SSE-NEXT: movdqa %xmm3, %xmm11
4060 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4061 ; SSE-NEXT: movdqa %xmm11, %xmm12
4062 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4063 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
4064 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4065 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
4066 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
4067 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
4068 ; SSE-NEXT: movdqa %xmm7, %xmm12
4069 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4070 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
4071 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4072 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
4073 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
4074 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4075 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
4076 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4077 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
4078 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4079 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4080 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
4081 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4082 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
4083 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4084 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
4085 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
4086 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4087 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4088 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
4089 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4090 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4091 ; SSE-NEXT: movdqa %xmm3, %xmm4
4092 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
4093 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
4094 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4095 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
4096 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4097 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4098 ; SSE-NEXT: movdqa %xmm1, %xmm4
4099 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4100 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
4101 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4102 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
4103 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[2,2,2,2]
4104 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4105 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,3,3,3]
4106 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4107 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm3[0],xmm5[1]
4108 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4109 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4110 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4111 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4112 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[2,3]
4113 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4114 ; SSE-NEXT: movdqa 64(%r10), %xmm0
4115 ; SSE-NEXT: movdqa 64(%rax), %xmm4
4116 ; SSE-NEXT: movdqa %xmm0, %xmm6
4117 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4118 ; SSE-NEXT: movdqa 64(%r8), %xmm1
4119 ; SSE-NEXT: movdqa 64(%r9), %xmm5
4120 ; SSE-NEXT: movdqa %xmm1, %xmm7
4121 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4122 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
4123 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
4124 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4125 ; SSE-NEXT: movdqa %xmm3, %xmm13
4126 ; SSE-NEXT: movdqa 64(%rdx), %xmm2
4127 ; SSE-NEXT: movdqa 64(%rcx), %xmm8
4128 ; SSE-NEXT: movdqa %xmm2, %xmm10
4129 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
4130 ; SSE-NEXT: movdqa 64(%rdi), %xmm3
4131 ; SSE-NEXT: movdqa 64(%rsi), %xmm9
4132 ; SSE-NEXT: movdqa %xmm3, %xmm11
4133 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4134 ; SSE-NEXT: movdqa %xmm11, %xmm12
4135 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4136 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
4137 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4138 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
4139 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
4140 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
4141 ; SSE-NEXT: movdqa %xmm7, %xmm12
4142 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4143 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
4144 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4145 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
4146 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
4147 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4148 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
4149 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4150 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
4151 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4152 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4153 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
4154 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4155 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
4156 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4157 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
4158 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
4159 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4160 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4161 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
4162 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4163 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4164 ; SSE-NEXT: movdqa %xmm3, %xmm4
4165 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
4166 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
4167 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4168 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
4169 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4170 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4171 ; SSE-NEXT: movdqa %xmm1, %xmm4
4172 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4173 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
4174 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4175 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
4176 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[2,2,2,2]
4177 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4178 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,3,3,3]
4179 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4180 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm3[0],xmm5[1]
4181 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4182 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4183 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4184 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4185 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[2,3]
4186 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4187 ; SSE-NEXT: movdqa 80(%r10), %xmm0
4188 ; SSE-NEXT: movdqa 80(%rax), %xmm4
4189 ; SSE-NEXT: movdqa %xmm0, %xmm6
4190 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4191 ; SSE-NEXT: movdqa 80(%r8), %xmm1
4192 ; SSE-NEXT: movdqa 80(%r9), %xmm5
4193 ; SSE-NEXT: movdqa %xmm1, %xmm7
4194 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4195 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
4196 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
4197 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4198 ; SSE-NEXT: movdqa %xmm3, %xmm13
4199 ; SSE-NEXT: movdqa 80(%rdx), %xmm2
4200 ; SSE-NEXT: movdqa 80(%rcx), %xmm8
4201 ; SSE-NEXT: movdqa %xmm2, %xmm10
4202 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
4203 ; SSE-NEXT: movdqa 80(%rdi), %xmm3
4204 ; SSE-NEXT: movdqa 80(%rsi), %xmm9
4205 ; SSE-NEXT: movdqa %xmm3, %xmm11
4206 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4207 ; SSE-NEXT: movdqa %xmm11, %xmm12
4208 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4209 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
4210 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4211 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
4212 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
4213 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
4214 ; SSE-NEXT: movdqa %xmm7, %xmm12
4215 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4216 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
4217 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4218 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
4219 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
4220 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4221 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
4222 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4223 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
4224 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4225 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4226 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
4227 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4228 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
4229 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4230 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
4231 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
4232 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4233 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4234 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
4235 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4236 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4237 ; SSE-NEXT: movdqa %xmm3, %xmm4
4238 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
4239 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
4240 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4241 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
4242 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4243 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4244 ; SSE-NEXT: movdqa %xmm1, %xmm4
4245 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4246 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
4247 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4248 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
4249 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[2,2,2,2]
4250 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4251 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,3,3,3]
4252 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4253 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm3[0],xmm5[1]
4254 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4255 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4256 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4257 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4258 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm1[2,3]
4259 ; SSE-NEXT: movaps %xmm4, (%rsp) # 16-byte Spill
4260 ; SSE-NEXT: movdqa 96(%r10), %xmm0
4261 ; SSE-NEXT: movdqa 96(%rax), %xmm4
4262 ; SSE-NEXT: movdqa %xmm0, %xmm6
4263 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
4264 ; SSE-NEXT: movdqa 96(%r8), %xmm1
4265 ; SSE-NEXT: movdqa 96(%r9), %xmm5
4266 ; SSE-NEXT: movdqa %xmm1, %xmm7
4267 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
4268 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,0,0,0]
4269 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,0,0,0]
4270 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4271 ; SSE-NEXT: movdqa %xmm3, %xmm13
4272 ; SSE-NEXT: movdqa 96(%rdx), %xmm2
4273 ; SSE-NEXT: movdqa 96(%rcx), %xmm8
4274 ; SSE-NEXT: movdqa %xmm2, %xmm10
4275 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
4276 ; SSE-NEXT: movdqa 96(%rdi), %xmm3
4277 ; SSE-NEXT: movdqa 96(%rsi), %xmm9
4278 ; SSE-NEXT: movdqa %xmm3, %xmm11
4279 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4280 ; SSE-NEXT: movdqa %xmm11, %xmm12
4281 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1]
4282 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm12[0],xmm13[1]
4283 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4284 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,1,1]
4285 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,1,1]
4286 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
4287 ; SSE-NEXT: movdqa %xmm7, %xmm12
4288 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4289 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm12[2,3]
4290 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4291 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[2,2,2,2]
4292 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[2,2,2,2]
4293 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4294 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[3,3,3,3]
4295 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4296 ; SSE-NEXT: movsd {{.*#+}} xmm13 = xmm11[0],xmm13[1]
4297 ; SSE-NEXT: movapd %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4298 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4299 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[3,3,3,3]
4300 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm6[0],xmm12[1],xmm6[1]
4301 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm7[2,3]
4302 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4303 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
4304 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
4305 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4306 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4307 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
4308 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4309 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4310 ; SSE-NEXT: movdqa %xmm3, %xmm4
4311 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
4312 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm4[0],xmm5[1]
4313 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4314 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,1,1]
4315 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4316 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
4317 ; SSE-NEXT: movdqa %xmm1, %xmm4
4318 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4319 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
4320 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4321 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
4322 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm1[2,2,2,2]
4323 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm4[2],xmm15[3],xmm4[3]
4324 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm3[3,3,3,3]
4325 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4326 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm3[0],xmm15[1]
4327 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4328 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4329 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm0[0],xmm14[1],xmm0[1]
4330 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm1[2,3]
4331 ; SSE-NEXT: movdqa 112(%r10), %xmm9
4332 ; SSE-NEXT: movdqa 112(%rax), %xmm0
4333 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4334 ; SSE-NEXT: movdqa %xmm9, %xmm5
4335 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
4336 ; SSE-NEXT: movdqa 112(%r8), %xmm4
4337 ; SSE-NEXT: movdqa 112(%r9), %xmm0
4338 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4339 ; SSE-NEXT: movdqa %xmm4, %xmm1
4340 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4341 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,0,0,0]
4342 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm1[0,0,0,0]
4343 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm0[2],xmm11[3],xmm0[3]
4344 ; SSE-NEXT: movdqa 112(%rdx), %xmm6
4345 ; SSE-NEXT: movdqa 112(%rcx), %xmm13
4346 ; SSE-NEXT: movdqa %xmm6, %xmm3
4347 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
4348 ; SSE-NEXT: movdqa 112(%rdi), %xmm2
4349 ; SSE-NEXT: movdqa 112(%rsi), %xmm12
4350 ; SSE-NEXT: movdqa %xmm2, %xmm0
4351 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
4352 ; SSE-NEXT: movdqa %xmm0, %xmm7
4353 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
4354 ; SSE-NEXT: movsd {{.*#+}} xmm11 = xmm7[0],xmm11[1]
4355 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm3[1,1,1,1]
4356 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[1,1,1,1]
4357 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm7[0],xmm10[1],xmm7[1]
4358 ; SSE-NEXT: movdqa %xmm1, %xmm7
4359 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
4360 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm7[2,3]
4361 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[2,2,2,2]
4362 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm1[2,2,2,2]
4363 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm7[2],xmm8[3],xmm7[3]
4364 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[3,3,3,3]
4365 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm3[2],xmm0[3],xmm3[3]
4366 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm0[0],xmm8[1]
4367 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
4368 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[3,3,3,3]
4369 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm0[0],xmm7[1],xmm0[1]
4370 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[2,3]
4371 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm13[4],xmm6[5],xmm13[5],xmm6[6],xmm13[6],xmm6[7],xmm13[7]
4372 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm12[4],xmm2[5],xmm12[5],xmm2[6],xmm12[6],xmm2[7],xmm12[7]
4373 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
4374 ; SSE-NEXT: # xmm9 = xmm9[4],mem[4],xmm9[5],mem[5],xmm9[6],mem[6],xmm9[7],mem[7]
4375 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
4376 ; SSE-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
4377 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[0,0,0,0]
4378 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,0,0]
4379 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4380 ; SSE-NEXT: movdqa %xmm2, %xmm0
4381 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
4382 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
4383 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,1,1]
4384 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,1,1]
4385 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
4386 ; SSE-NEXT: movdqa %xmm4, %xmm0
4387 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm9[0],xmm0[1],xmm9[1]
4388 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm0[2,3]
4389 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,2,2]
4390 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[2,2,2,2]
4391 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm0[2],xmm5[3],xmm0[3]
4392 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
4393 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm6[2],xmm2[3],xmm6[3]
4394 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm2[0],xmm5[1]
4395 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm9[2],xmm4[3],xmm9[3]
4396 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[3,3,3,3]
4397 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
4398 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
4399 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4400 ; SSE-NEXT: movaps %xmm0, 1008(%rax)
4401 ; SSE-NEXT: movapd %xmm5, 992(%rax)
4402 ; SSE-NEXT: movaps %xmm3, 976(%rax)
4403 ; SSE-NEXT: movapd %xmm1, 960(%rax)
4404 ; SSE-NEXT: movaps %xmm7, 944(%rax)
4405 ; SSE-NEXT: movapd %xmm8, 928(%rax)
4406 ; SSE-NEXT: movaps %xmm10, 912(%rax)
4407 ; SSE-NEXT: movapd %xmm11, 896(%rax)
4408 ; SSE-NEXT: movaps %xmm14, 880(%rax)
4409 ; SSE-NEXT: movapd %xmm15, 864(%rax)
4410 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4411 ; SSE-NEXT: movaps %xmm0, 848(%rax)
4412 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4413 ; SSE-NEXT: movaps %xmm0, 832(%rax)
4414 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4415 ; SSE-NEXT: movaps %xmm0, 816(%rax)
4416 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4417 ; SSE-NEXT: movaps %xmm0, 800(%rax)
4418 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4419 ; SSE-NEXT: movaps %xmm0, 784(%rax)
4420 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4421 ; SSE-NEXT: movaps %xmm0, 768(%rax)
4422 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4423 ; SSE-NEXT: movaps %xmm0, 752(%rax)
4424 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4425 ; SSE-NEXT: movaps %xmm0, 736(%rax)
4426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4427 ; SSE-NEXT: movaps %xmm0, 720(%rax)
4428 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4429 ; SSE-NEXT: movaps %xmm0, 704(%rax)
4430 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4431 ; SSE-NEXT: movaps %xmm0, 688(%rax)
4432 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4433 ; SSE-NEXT: movaps %xmm0, 672(%rax)
4434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4435 ; SSE-NEXT: movaps %xmm0, 656(%rax)
4436 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4437 ; SSE-NEXT: movaps %xmm0, 640(%rax)
4438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4439 ; SSE-NEXT: movaps %xmm0, 624(%rax)
4440 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4441 ; SSE-NEXT: movaps %xmm0, 608(%rax)
4442 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4443 ; SSE-NEXT: movaps %xmm0, 592(%rax)
4444 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4445 ; SSE-NEXT: movaps %xmm0, 576(%rax)
4446 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4447 ; SSE-NEXT: movaps %xmm0, 560(%rax)
4448 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4449 ; SSE-NEXT: movaps %xmm0, 544(%rax)
4450 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4451 ; SSE-NEXT: movaps %xmm0, 528(%rax)
4452 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4453 ; SSE-NEXT: movaps %xmm0, 512(%rax)
4454 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4455 ; SSE-NEXT: movaps %xmm0, 496(%rax)
4456 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4457 ; SSE-NEXT: movaps %xmm0, 480(%rax)
4458 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4459 ; SSE-NEXT: movaps %xmm0, 464(%rax)
4460 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4461 ; SSE-NEXT: movaps %xmm0, 448(%rax)
4462 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4463 ; SSE-NEXT: movaps %xmm0, 432(%rax)
4464 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4465 ; SSE-NEXT: movaps %xmm0, 416(%rax)
4466 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4467 ; SSE-NEXT: movaps %xmm0, 400(%rax)
4468 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4469 ; SSE-NEXT: movaps %xmm0, 384(%rax)
4470 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4471 ; SSE-NEXT: movaps %xmm0, 368(%rax)
4472 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4473 ; SSE-NEXT: movaps %xmm0, 352(%rax)
4474 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4475 ; SSE-NEXT: movaps %xmm0, 336(%rax)
4476 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4477 ; SSE-NEXT: movaps %xmm0, 320(%rax)
4478 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4479 ; SSE-NEXT: movaps %xmm0, 304(%rax)
4480 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4481 ; SSE-NEXT: movaps %xmm0, 288(%rax)
4482 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4483 ; SSE-NEXT: movaps %xmm0, 272(%rax)
4484 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4485 ; SSE-NEXT: movaps %xmm0, 256(%rax)
4486 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4487 ; SSE-NEXT: movaps %xmm0, 240(%rax)
4488 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4489 ; SSE-NEXT: movaps %xmm0, 224(%rax)
4490 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4491 ; SSE-NEXT: movaps %xmm0, 208(%rax)
4492 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4493 ; SSE-NEXT: movaps %xmm0, 192(%rax)
4494 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4495 ; SSE-NEXT: movaps %xmm0, 176(%rax)
4496 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4497 ; SSE-NEXT: movaps %xmm0, 160(%rax)
4498 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4499 ; SSE-NEXT: movaps %xmm0, 144(%rax)
4500 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4501 ; SSE-NEXT: movaps %xmm0, 128(%rax)
4502 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4503 ; SSE-NEXT: movaps %xmm0, 112(%rax)
4504 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4505 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4506 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4507 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4508 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4509 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4511 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4512 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4513 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4514 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4515 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4516 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4517 ; SSE-NEXT: movaps %xmm0, (%rax)
4518 ; SSE-NEXT: addq $776, %rsp # imm = 0x308
4521 ; AVX1-ONLY-LABEL: store_i16_stride8_vf64:
4522 ; AVX1-ONLY: # %bb.0:
4523 ; AVX1-ONLY-NEXT: subq $744, %rsp # imm = 0x2E8
4524 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4525 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
4526 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm5
4527 ; AVX1-ONLY-NEXT: vmovdqa 16(%r10), %xmm0
4528 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm6
4529 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
4530 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[0,0,0,0]
4531 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
4532 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
4533 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm7
4534 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm8
4535 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
4536 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,1,0,1]
4537 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm3[0],zero,xmm3[1],zero
4538 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm4, %ymm4
4539 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm4[0,1,2],ymm1[3],ymm4[4,5,6],ymm1[7]
4540 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm9
4541 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm10
4542 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
4543 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[1,1,1,1]
4544 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
4545 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm11
4546 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm12
4547 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
4548 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[0,0,1,1]
4549 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm15, %ymm15
4550 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm1[0],ymm15[1],ymm1[2,3,4],ymm15[5],ymm1[6,7]
4551 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm1
4552 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm15[0,1],ymm13[2,3],ymm15[4,5],ymm13[6,7]
4553 ; AVX1-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4554 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm3[2,2,3,3]
4555 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm3, %ymm3
4556 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm2[2,2,2,2]
4557 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm13, %ymm13
4558 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm2
4559 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm3[0,1,2],ymm13[3],ymm3[4,5,6],ymm13[7]
4560 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm14[2,2,3,3]
4561 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,3,2,3]
4562 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm3, %ymm3
4563 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm4[2,3,2,3]
4564 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[3,3,3,3]
4565 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm14, %ymm14
4566 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm4
4567 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm14[0],ymm3[1],ymm14[2,3,4],ymm3[5],ymm14[6,7]
4568 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
4569 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm14[0,1],ymm13[2,3],ymm14[4,5],ymm13[6,7]
4570 ; AVX1-ONLY-NEXT: vmovups %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4571 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
4572 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,0,0]
4573 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm5[0,1,0,1]
4574 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
4575 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
4576 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm8[0,1,0,1]
4577 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm8[0],zero,xmm8[1],zero
4578 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm7, %ymm13
4579 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm7
4580 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm13[0,1,2],ymm6[3],ymm13[4,5,6],ymm6[7]
4581 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm6
4582 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
4583 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[1,1,1,1]
4584 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm10
4585 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
4586 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[0,0,1,1]
4587 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm12
4588 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm12[1],ymm10[2,3,4],ymm12[5],ymm10[6,7]
4589 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1],ymm13[2,3],ymm10[4,5],ymm13[6,7]
4590 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4591 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,3,3]
4592 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
4593 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm5[2,2,2,2]
4594 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm10, %ymm5
4595 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1,2],ymm5[3],ymm8[4,5,6],ymm5[7]
4596 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm11[2,2,3,3]
4597 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4598 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm8, %ymm8
4599 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,3,2,3]
4600 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[3,3,3,3]
4601 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
4602 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
4603 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3],ymm8[4,5],ymm5[6,7]
4604 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4605 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4606 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm5[0,0,0,0]
4607 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[0,1,0,1]
4608 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
4609 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
4610 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[0,1,0,1]
4611 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm9[0],zero,xmm9[1],zero
4612 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4613 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0,1,2],ymm8[3],ymm10[4,5,6],ymm8[7]
4614 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
4615 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm11[1,1,1,1]
4616 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm11, %ymm12
4617 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm8
4618 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
4619 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
4620 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm14
4621 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4622 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4623 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4624 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4625 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4626 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm5[2,2,2,2]
4627 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm10, %ymm5
4628 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm9[0,1,2],ymm5[3],ymm9[4,5,6],ymm5[7]
4629 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm13[2,2,3,3]
4630 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
4631 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm5, %ymm5
4632 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4633 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
4634 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4635 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm10[0],ymm5[1],ymm10[2,3,4],ymm5[5],ymm10[6,7]
4636 ; AVX1-ONLY-NEXT: vmovdqa 32(%r10), %xmm5
4637 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0,1],ymm9[2,3],ymm10[4,5],ymm9[6,7]
4638 ; AVX1-ONLY-NEXT: vmovups %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4639 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4640 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm9[0,0,0,0]
4641 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm9[0,1,0,1]
4642 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
4643 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
4644 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,1,0,1]
4645 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero
4646 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
4647 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm1
4648 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
4649 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm0
4650 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm3[4],xmm7[5],xmm3[5],xmm7[6],xmm3[6],xmm7[7],xmm3[7]
4651 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[1,1,1,1]
4652 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
4653 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
4654 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
4655 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm7
4656 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm2[0],ymm7[1],ymm2[2,3,4],ymm7[5],ymm2[6,7]
4657 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm2
4658 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0,1],ymm10[2,3],ymm7[4,5],ymm10[6,7]
4659 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4660 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[2,2,3,3]
4661 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm4, %ymm4
4662 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm9[2,2,2,2]
4663 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
4664 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2],ymm7[3],ymm4[4,5,6],ymm7[7]
4665 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[2,2,3,3]
4666 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,3,2,3]
4667 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
4668 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[2,3,2,3]
4669 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
4670 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm7, %ymm3
4671 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0],ymm6[1],ymm3[2,3,4],ymm6[5],ymm3[6,7]
4672 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
4673 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4674 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
4675 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,0,0,0]
4676 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm8[0,1,0,1]
4677 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
4678 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
4679 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm9[0,1,0,1]
4680 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
4681 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4682 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
4683 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm3
4684 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm4
4685 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4686 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
4687 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm12
4688 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm6
4689 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm7
4690 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4691 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
4692 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm14
4693 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4694 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4695 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4696 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4697 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4698 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
4699 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
4700 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
4701 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm13[2,2,3,3]
4702 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
4703 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4704 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4705 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
4706 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4707 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
4708 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
4709 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4710 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
4711 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[0,0,0,0]
4712 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm1[0,1,0,1]
4713 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm5, %ymm5
4714 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
4715 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,0,1]
4716 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm0[0],zero,xmm0[1],zero
4717 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm2, %ymm2
4718 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5,6],ymm5[7]
4719 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
4720 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4721 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4722 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
4723 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
4724 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
4725 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
4726 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
4727 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4728 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,3,3]
4729 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
4730 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[2,2,2,2]
4731 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
4732 ; AVX1-ONLY-NEXT: vmovdqa 48(%r10), %xmm0
4733 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
4734 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
4735 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,3,2,3]
4736 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm4
4737 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,3,2,3]
4738 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
4739 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm3
4740 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm1
4741 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7]
4742 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
4743 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4744 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4745 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,0,0]
4746 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,0,1]
4747 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
4748 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm2
4749 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm3
4750 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4751 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
4752 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
4753 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
4754 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
4755 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm4
4756 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm5
4757 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4758 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
4759 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm12
4760 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm6
4761 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm7
4762 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4763 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
4764 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm14
4765 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4766 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4767 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4768 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4769 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4770 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
4771 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
4772 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
4773 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm13[2,2,3,3]
4774 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
4775 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4776 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4777 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
4778 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4779 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
4780 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
4781 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4782 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4783 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
4784 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,0,1]
4785 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm1
4786 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4787 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
4788 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm2[0],zero,xmm2[1],zero
4789 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
4790 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
4791 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
4792 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4793 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4794 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
4795 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
4796 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
4797 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
4798 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
4799 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4800 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
4801 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
4802 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
4803 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
4804 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
4805 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
4806 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
4807 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
4808 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
4809 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
4810 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
4811 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
4812 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4813 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4814 ; AVX1-ONLY-NEXT: vmovdqa 64(%r10), %xmm0
4815 ; AVX1-ONLY-NEXT: vmovdqa 64(%rax), %xmm1
4816 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4817 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,0,0]
4818 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,0,1]
4819 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
4820 ; AVX1-ONLY-NEXT: vmovdqa 64(%r9), %xmm2
4821 ; AVX1-ONLY-NEXT: vmovdqa 64(%r8), %xmm3
4822 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4823 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
4824 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
4825 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
4826 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
4827 ; AVX1-ONLY-NEXT: vmovdqa 64(%rsi), %xmm4
4828 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm5
4829 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4830 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
4831 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm12
4832 ; AVX1-ONLY-NEXT: vmovdqa 64(%rcx), %xmm6
4833 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdx), %xmm7
4834 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4835 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
4836 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm14
4837 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4838 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4839 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4840 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4841 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4842 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
4843 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
4844 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
4845 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm13[2,2,3,3]
4846 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
4847 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4848 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4849 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
4850 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4851 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
4852 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
4853 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4854 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4855 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
4856 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,0,1]
4857 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm1
4858 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4859 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
4860 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm2[0],zero,xmm2[1],zero
4861 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
4862 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
4863 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
4864 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4865 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4866 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
4867 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
4868 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
4869 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
4870 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
4871 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4872 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
4873 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
4874 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
4875 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
4876 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
4877 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
4878 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
4879 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
4880 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
4881 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
4882 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
4883 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
4884 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4885 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4886 ; AVX1-ONLY-NEXT: vmovdqa 80(%r10), %xmm0
4887 ; AVX1-ONLY-NEXT: vmovdqa 80(%rax), %xmm1
4888 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4889 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,0,0]
4890 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,0,1]
4891 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
4892 ; AVX1-ONLY-NEXT: vmovdqa 80(%r9), %xmm2
4893 ; AVX1-ONLY-NEXT: vmovdqa 80(%r8), %xmm3
4894 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4895 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
4896 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
4897 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
4898 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
4899 ; AVX1-ONLY-NEXT: vmovdqa 80(%rsi), %xmm4
4900 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm5
4901 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4902 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
4903 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm12
4904 ; AVX1-ONLY-NEXT: vmovdqa 80(%rcx), %xmm6
4905 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdx), %xmm7
4906 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4907 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
4908 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm14
4909 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4910 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4911 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4912 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4913 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4914 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
4915 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
4916 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
4917 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm13[2,2,3,3]
4918 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
4919 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4920 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[2,3,2,3]
4921 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
4922 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4923 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
4924 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
4925 ; AVX1-ONLY-NEXT: vmovups %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4926 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4927 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
4928 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,0,1]
4929 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm1
4930 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4931 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
4932 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm2[0],zero,xmm2[1],zero
4933 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
4934 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
4935 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
4936 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4937 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
4938 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
4939 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
4940 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
4941 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
4942 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
4943 ; AVX1-ONLY-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
4944 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
4945 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
4946 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
4947 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
4948 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
4949 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
4950 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
4951 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
4952 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
4953 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
4954 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
4955 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
4956 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
4957 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4958 ; AVX1-ONLY-NEXT: vmovdqa 96(%r10), %xmm13
4959 ; AVX1-ONLY-NEXT: vmovdqa 96(%rax), %xmm1
4960 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm13[0],xmm1[1],xmm13[1],xmm1[2],xmm13[2],xmm1[3],xmm13[3]
4961 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,0,0]
4962 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,0,1]
4963 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm4
4964 ; AVX1-ONLY-NEXT: vmovdqa 96(%r9), %xmm2
4965 ; AVX1-ONLY-NEXT: vmovdqa 96(%r8), %xmm3
4966 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4967 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[0,1,0,1]
4968 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero
4969 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
4970 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
4971 ; AVX1-ONLY-NEXT: vmovdqa 96(%rsi), %xmm4
4972 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm5
4973 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4974 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,1,1]
4975 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm11, %ymm12
4976 ; AVX1-ONLY-NEXT: vmovdqa 96(%rcx), %xmm6
4977 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdx), %xmm7
4978 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4979 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm0[0,0,1,1]
4980 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm14
4981 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7]
4982 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
4983 ; AVX1-ONLY-NEXT: vmovups %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4984 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,2,3,3]
4985 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4986 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm8[2,2,2,2]
4987 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
4988 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
4989 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[2,2,3,3]
4990 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
4991 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm9, %ymm0
4992 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[2,3,2,3]
4993 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm11[3,3,3,3]
4994 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm9, %ymm9
4995 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm9[0],ymm0[1],ymm9[2,3,4],ymm0[5],ymm9[6,7]
4996 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm8[2,3],ymm0[4,5],ymm8[6,7]
4997 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4998 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm13[4],xmm1[5],xmm13[5],xmm1[6],xmm13[6],xmm1[7],xmm13[7]
4999 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
5000 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,0,1]
5001 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm1, %ymm1
5002 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5003 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
5004 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm2[0],zero,xmm2[1],zero
5005 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
5006 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
5007 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5008 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
5009 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,1,1]
5010 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
5011 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,0,1,1]
5012 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm6
5013 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7]
5014 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm5[0,1],ymm1[2,3],ymm5[4,5],ymm1[6,7]
5015 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5016 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
5017 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
5018 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
5019 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
5020 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5021 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,3,3]
5022 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
5023 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5024 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,3,2,3]
5025 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
5026 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
5027 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5028 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm14 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5029 ; AVX1-ONLY-NEXT: vmovdqa 112(%r10), %xmm12
5030 ; AVX1-ONLY-NEXT: vmovdqa 112(%rax), %xmm11
5031 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
5032 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[0,0,0,0]
5033 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm8[0,1,0,1]
5034 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
5035 ; AVX1-ONLY-NEXT: vmovdqa 112(%r9), %xmm10
5036 ; AVX1-ONLY-NEXT: vmovdqa 112(%r8), %xmm7
5037 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
5038 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm13[0,1,0,1]
5039 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm13[0],zero,xmm13[1],zero
5040 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5041 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5042 ; AVX1-ONLY-NEXT: vmovdqa 112(%rsi), %xmm6
5043 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm5
5044 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
5045 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
5046 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
5047 ; AVX1-ONLY-NEXT: vmovdqa 112(%rcx), %xmm4
5048 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdx), %xmm3
5049 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
5050 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm0[0,0,1,1]
5051 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm15, %ymm15
5052 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm15[1],ymm1[2,3,4],ymm15[5],ymm1[6,7]
5053 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm1[0,1],ymm9[2,3],ymm1[4,5],ymm9[6,7]
5054 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm13[2,2,3,3]
5055 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm13, %ymm1
5056 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm8[2,2,2,2]
5057 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm13, %ymm8
5058 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0,1,2],ymm8[3],ymm1[4,5,6],ymm8[7]
5059 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[2,2,3,3]
5060 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
5061 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm8, %ymm0
5062 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm2[2,3,2,3]
5063 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
5064 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm8, %ymm2
5065 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3,4],ymm0[5],ymm2[6,7]
5066 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
5067 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
5068 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
5069 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,0,1]
5070 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5071 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm10[4],xmm7[5],xmm10[5],xmm7[6],xmm10[6],xmm7[7],xmm10[7]
5072 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[0,1,0,1]
5073 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm2[0],zero,xmm2[1],zero
5074 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm7, %ymm7
5075 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm7[0,1,2],ymm1[3],ymm7[4,5,6],ymm1[7]
5076 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
5077 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
5078 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[1,1,1,1]
5079 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
5080 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
5081 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm6
5082 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3,4],ymm6[5],ymm4[6,7]
5083 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm4[0,1],ymm1[2,3],ymm4[4,5],ymm1[6,7]
5084 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,2,3,3]
5085 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
5086 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[2,2,2,2]
5087 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
5088 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
5089 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
5090 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,2,3]
5091 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
5092 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,3,2,3]
5093 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[3,3,3,3]
5094 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
5095 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7]
5096 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
5097 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5098 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 992(%rax)
5099 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 960(%rax)
5100 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 928(%rax)
5101 ; AVX1-ONLY-NEXT: vmovaps %ymm9, 896(%rax)
5102 ; AVX1-ONLY-NEXT: vmovaps %ymm14, 864(%rax)
5103 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5104 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 832(%rax)
5105 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5106 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 800(%rax)
5107 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5108 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 768(%rax)
5109 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5110 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 736(%rax)
5111 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5112 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
5113 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5114 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
5115 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5116 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
5117 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5118 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
5119 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5120 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
5121 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5122 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 544(%rax)
5123 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5124 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 512(%rax)
5125 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5126 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
5127 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5128 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
5129 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5130 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
5131 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5132 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
5133 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5134 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
5135 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5136 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
5137 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5138 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
5139 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5140 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
5141 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5142 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
5143 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5144 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
5145 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5146 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
5147 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5148 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
5149 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5150 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
5151 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5152 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
5153 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5154 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
5155 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5156 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
5157 ; AVX1-ONLY-NEXT: addq $744, %rsp # imm = 0x2E8
5158 ; AVX1-ONLY-NEXT: vzeroupper
5159 ; AVX1-ONLY-NEXT: retq
5161 ; AVX2-SLOW-LABEL: store_i16_stride8_vf64:
5162 ; AVX2-SLOW: # %bb.0:
5163 ; AVX2-SLOW-NEXT: subq $744, %rsp # imm = 0x2E8
5164 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5165 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
5166 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm6
5167 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %xmm0
5168 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm7
5169 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
5170 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
5171 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5172 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm8
5173 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm9
5174 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
5175 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero
5176 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5177 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
5178 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm10
5179 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm11
5180 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
5181 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[0,0,1,1]
5182 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
5183 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm12
5184 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm13
5185 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
5186 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm14[0],zero,xmm14[1],zero
5187 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,1,3]
5188 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm15 = ymm15[0],ymm2[1],ymm15[2,3,4],ymm2[5],ymm15[6,7]
5189 ; AVX2-SLOW-NEXT: vmovdqa 32(%r10), %xmm2
5190 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm15[0,1],ymm1[2,3],ymm15[4,5],ymm1[6,7]
5191 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5192 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm1
5193 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
5194 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
5195 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
5196 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
5197 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm15 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
5198 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm3
5199 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,2,3,3]
5200 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
5201 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm14[2,2,3,3]
5202 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
5203 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
5204 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm4
5205 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm15[2,3],ymm5[4,5],ymm15[6,7]
5206 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5207 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm5
5208 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
5209 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
5210 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
5211 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
5212 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm8[0],zero,xmm8[1],zero
5213 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
5214 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm7[3],ymm9[4,5,6],ymm7[7]
5215 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm7
5216 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
5217 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[0,0,1,1]
5218 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,1,3]
5219 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
5220 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm12[0],zero,xmm12[1],zero
5221 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,1,3]
5222 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm13[0],ymm11[1],ymm13[2,3,4],ymm11[5],ymm13[6,7]
5223 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1],ymm9[2,3],ymm11[4,5],ymm9[6,7]
5224 ; AVX2-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5225 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
5226 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
5227 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
5228 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
5229 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
5230 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[2,2,3,3]
5231 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,1,3]
5232 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm12[2,2,3,3]
5233 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
5234 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
5235 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1],ymm6[2,3],ymm8[4,5],ymm6[6,7]
5236 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5237 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
5238 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
5239 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
5240 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
5241 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm10[0],zero,xmm10[1],zero
5242 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
5243 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
5244 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5245 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[0,0,1,1]
5246 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm9[0,1,1,3]
5247 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm9
5248 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
5249 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
5250 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
5251 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
5252 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm12[0,1],ymm8[2,3],ymm12[4,5],ymm8[6,7]
5253 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5254 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
5255 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
5256 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[2,2,3,3]
5257 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
5258 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
5259 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[2,2,3,3]
5260 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
5261 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
5262 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
5263 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm6[1],ymm10[2,3,4],ymm6[5],ymm10[6,7]
5264 ; AVX2-SLOW-NEXT: vmovdqa 64(%rax), %xmm6
5265 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1],ymm8[2,3],ymm10[4,5],ymm8[6,7]
5266 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5267 ; AVX2-SLOW-NEXT: vmovdqa 64(%r10), %xmm8
5268 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
5269 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm2[0,0,1,1]
5270 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
5271 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5272 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
5273 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5274 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5275 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %xmm0
5276 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5277 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
5278 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
5279 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
5280 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
5281 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,1,3]
5282 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm1[1],ymm7[2,3,4],ymm1[5],ymm7[6,7]
5283 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %xmm1
5284 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1],ymm10[2,3],ymm7[4,5],ymm10[6,7]
5285 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5286 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
5287 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5288 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
5289 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
5290 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5291 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[2,2,3,3]
5292 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
5293 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,2,3,3]
5294 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
5295 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4],ymm3[5],ymm4[6,7]
5296 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
5297 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5298 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
5299 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,0,1,1]
5300 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5301 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5302 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm9[0],zero,xmm9[1],zero
5303 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
5304 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5305 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
5306 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
5307 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5308 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
5309 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm4[0,1,1,3]
5310 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %xmm4
5311 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm5
5312 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5313 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
5314 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
5315 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
5316 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
5317 ; AVX2-SLOW-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5318 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
5319 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
5320 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
5321 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
5322 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2],ymm7[3],ymm9[4,5,6],ymm7[7]
5323 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[2,2,3,3]
5324 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
5325 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
5326 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
5327 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
5328 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1],ymm7[2,3],ymm9[4,5],ymm7[6,7]
5329 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5330 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
5331 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
5332 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[0,0,1,1]
5333 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5334 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm0[0],zero,xmm0[1],zero
5335 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
5336 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm7[0,1,2],ymm1[3],ymm7[4,5,6],ymm1[7]
5337 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5338 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5339 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[0,0,1,1]
5340 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
5341 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm3[0],zero,xmm3[1],zero
5342 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
5343 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
5344 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1],ymm1[2,3],ymm4[4,5],ymm1[6,7]
5345 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5346 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[2,2,3,3]
5347 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5348 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5349 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
5350 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6],ymm1[7]
5351 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
5352 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
5353 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
5354 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
5355 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5356 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5357 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5358 ; AVX2-SLOW-NEXT: vmovdqa 96(%rax), %xmm0
5359 ; AVX2-SLOW-NEXT: vmovdqa 96(%r10), %xmm1
5360 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5361 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,1,1]
5362 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,0,2,1]
5363 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %xmm2
5364 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %xmm4
5365 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
5366 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm9[0],zero,xmm9[1],zero
5367 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
5368 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7]
5369 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %xmm3
5370 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %xmm5
5371 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
5372 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[0,0,1,1]
5373 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm6[0,1,1,3]
5374 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %xmm6
5375 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm7
5376 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
5377 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
5378 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
5379 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
5380 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
5381 ; AVX2-SLOW-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5382 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
5383 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
5384 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
5385 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
5386 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
5387 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[2,2,3,3]
5388 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
5389 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
5390 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
5391 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
5392 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
5393 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5394 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
5395 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
5396 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
5397 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5398 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm1[0],zero,xmm1[1],zero
5399 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
5400 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
5401 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
5402 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
5403 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[0,0,1,1]
5404 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
5405 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
5406 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
5407 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5408 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5409 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5410 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5411 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
5412 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
5413 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5414 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5415 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,2,3,3]
5416 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
5417 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
5418 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
5419 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5420 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5421 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5422 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %ymm0
5423 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm1
5424 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
5425 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
5426 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5427 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm4
5428 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm5
5429 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
5430 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
5431 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
5432 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
5433 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm7
5434 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm8
5435 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
5436 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm9[0,0,2,1,4,4,6,5]
5437 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
5438 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm11
5439 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm12
5440 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11]
5441 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm13[0,1,1,3,4,5,5,7]
5442 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
5443 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm14[0],ymm10[1],ymm14[2,3,4],ymm10[5],ymm14[6,7]
5444 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
5445 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5446 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
5447 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5448 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[2,1,3,3,6,5,7,7]
5449 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5450 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5451 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[0,2,2,3,4,6,6,7]
5452 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
5453 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm13[2,1,3,3,6,5,7,7]
5454 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
5455 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3,4],ymm3[5],ymm6[6,7]
5456 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
5457 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5458 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15]
5459 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
5460 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm0[0,0,2,1,4,4,6,5]
5461 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5462 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,1,1,3,4,5,5,7]
5463 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5464 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5465 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
5466 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
5467 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm3[0,0,2,1,4,4,6,5]
5468 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
5469 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm4[0,1,1,3,4,5,5,7]
5470 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
5471 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5472 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5473 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5474 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
5475 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
5476 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
5477 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
5478 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5479 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[0,2,2,3,4,6,6,7]
5480 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
5481 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,3,3,6,5,7,7]
5482 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
5483 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5484 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5485 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5486 ; AVX2-SLOW-NEXT: vmovdqa 32(%r10), %ymm13
5487 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %ymm11
5488 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm13[0],ymm11[0],ymm13[1],ymm11[1],ymm13[2],ymm11[2],ymm13[3],ymm11[3],ymm13[8],ymm11[8],ymm13[9],ymm11[9],ymm13[10],ymm11[10],ymm13[11],ymm11[11]
5489 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
5490 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5491 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm4
5492 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm5
5493 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
5494 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
5495 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
5496 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
5497 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm7
5498 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm8
5499 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
5500 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm9[0,0,2,1,4,4,6,5]
5501 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
5502 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm1
5503 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm3
5504 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
5505 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm0[0,1,1,3,4,5,5,7]
5506 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
5507 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm15[0],ymm10[1],ymm15[2,3,4],ymm10[5],ymm15[6,7]
5508 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1],ymm12[2,3],ymm10[4,5],ymm12[6,7]
5509 ; AVX2-SLOW-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5510 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
5511 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5512 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,1,3,3,6,5,7,7]
5513 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
5514 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
5515 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm9[0,2,2,3,4,6,6,7]
5516 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
5517 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
5518 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
5519 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3,4],ymm6[5],ymm0[6,7]
5520 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
5521 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5522 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm13[4],ymm11[4],ymm13[5],ymm11[5],ymm13[6],ymm11[6],ymm13[7],ymm11[7],ymm13[12],ymm11[12],ymm13[13],ymm11[13],ymm13[14],ymm11[14],ymm13[15],ymm11[15]
5523 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
5524 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[0,0,2,1,4,4,6,5]
5525 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
5526 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm2[0,1,1,3,4,5,5,7]
5527 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
5528 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
5529 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
5530 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[12],ymm3[12],ymm1[13],ymm3[13],ymm1[14],ymm3[14],ymm1[15],ymm3[15]
5531 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[0,0,2,1,4,4,6,5]
5532 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
5533 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm1[0,1,1,3,4,5,5,7]
5534 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
5535 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3,4],ymm3[5],ymm6[6,7]
5536 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
5537 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, (%rsp) # 32-byte Spill
5538 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
5539 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
5540 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
5541 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5542 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
5543 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[0,2,2,3,4,6,6,7]
5544 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
5545 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
5546 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
5547 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
5548 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5549 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5550 ; AVX2-SLOW-NEXT: vmovdqa 64(%r10), %ymm10
5551 ; AVX2-SLOW-NEXT: vmovdqa 64(%rax), %ymm9
5552 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm10[0],ymm9[0],ymm10[1],ymm9[1],ymm10[2],ymm9[2],ymm10[3],ymm9[3],ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11]
5553 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
5554 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5555 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %ymm8
5556 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %ymm5
5557 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm8[0],ymm5[0],ymm8[1],ymm5[1],ymm8[2],ymm5[2],ymm8[3],ymm5[3],ymm8[8],ymm5[8],ymm8[9],ymm5[9],ymm8[10],ymm5[10],ymm8[11],ymm5[11]
5558 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
5559 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
5560 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
5561 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %ymm11
5562 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %ymm13
5563 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm11[0],ymm13[0],ymm11[1],ymm13[1],ymm11[2],ymm13[2],ymm11[3],ymm13[3],ymm11[8],ymm13[8],ymm11[9],ymm13[9],ymm11[10],ymm13[10],ymm11[11],ymm13[11]
5564 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm15[0,0,2,1,4,4,6,5]
5565 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
5566 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
5567 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %ymm1
5568 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
5569 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm0[0,1,1,3,4,5,5,7]
5570 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
5571 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm14[0],ymm7[1],ymm14[2,3,4],ymm7[5],ymm14[6,7]
5572 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1],ymm4[2,3],ymm7[4,5],ymm4[6,7]
5573 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5574 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
5575 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5576 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[2,1,3,3,6,5,7,7]
5577 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
5578 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
5579 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm15[0,2,2,3,4,6,6,7]
5580 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
5581 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
5582 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
5583 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
5584 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
5585 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5586 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm10[4],ymm9[4],ymm10[5],ymm9[5],ymm10[6],ymm9[6],ymm10[7],ymm9[7],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15]
5587 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm8[4],ymm5[4],ymm8[5],ymm5[5],ymm8[6],ymm5[6],ymm8[7],ymm5[7],ymm8[12],ymm5[12],ymm8[13],ymm5[13],ymm8[14],ymm5[14],ymm8[15],ymm5[15]
5588 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[0,0,2,1,4,4,6,5]
5589 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
5590 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm2[0,1,1,3,4,5,5,7]
5591 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
5592 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
5593 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
5594 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15]
5595 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm8[0,0,2,1,4,4,6,5]
5596 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
5597 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm1[0,1,1,3,4,5,5,7]
5598 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
5599 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7]
5600 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
5601 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5602 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
5603 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
5604 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
5605 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5606 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
5607 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[0,2,2,3,4,6,6,7]
5608 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
5609 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
5610 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
5611 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
5612 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5613 ; AVX2-SLOW-NEXT: vmovdqa 96(%r10), %ymm6
5614 ; AVX2-SLOW-NEXT: vmovdqa 96(%rax), %ymm5
5615 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
5616 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
5617 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5618 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %ymm8
5619 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %ymm9
5620 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[2],ymm9[2],ymm8[3],ymm9[3],ymm8[8],ymm9[8],ymm8[9],ymm9[9],ymm8[10],ymm9[10],ymm8[11],ymm9[11]
5621 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm10[0,1,1,3,4,5,5,7]
5622 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
5623 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm11[0,1,2],ymm3[3],ymm11[4,5,6],ymm3[7]
5624 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %ymm11
5625 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %ymm13
5626 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm11[0],ymm13[0],ymm11[1],ymm13[1],ymm11[2],ymm13[2],ymm11[3],ymm13[3],ymm11[8],ymm13[8],ymm11[9],ymm13[9],ymm11[10],ymm13[10],ymm11[11],ymm13[11]
5627 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm14[0,0,2,1,4,4,6,5]
5628 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
5629 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %ymm3
5630 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %ymm1
5631 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
5632 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm0[0,1,1,3,4,5,5,7]
5633 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,3]
5634 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0],ymm15[1],ymm12[2,3,4],ymm15[5],ymm12[6,7]
5635 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm15 = ymm12[0,1],ymm4[2,3],ymm12[4,5],ymm4[6,7]
5636 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
5637 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
5638 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm10[2,1,3,3,6,5,7,7]
5639 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
5640 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
5641 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[0,2,2,3,4,6,6,7]
5642 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
5643 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
5644 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
5645 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
5646 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
5647 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
5648 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm8[4],ymm9[4],ymm8[5],ymm9[5],ymm8[6],ymm9[6],ymm8[7],ymm9[7],ymm8[12],ymm9[12],ymm8[13],ymm9[13],ymm8[14],ymm9[14],ymm8[15],ymm9[15]
5649 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[0,0,2,1,4,4,6,5]
5650 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
5651 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm4[0,1,1,3,4,5,5,7]
5652 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
5653 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3],ymm6[4,5,6],ymm5[7]
5654 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
5655 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15]
5656 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
5657 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
5658 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm1[0,1,1,3,4,5,5,7]
5659 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
5660 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm8[0],ymm3[1],ymm8[2,3,4],ymm3[5],ymm8[6,7]
5661 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
5662 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
5663 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
5664 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
5665 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
5666 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
5667 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[0,2,2,3,4,6,6,7]
5668 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
5669 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
5670 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
5671 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2,3,4],ymm4[5],ymm1[6,7]
5672 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5673 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5674 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 992(%rax)
5675 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 960(%rax)
5676 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 928(%rax)
5677 ; AVX2-SLOW-NEXT: vmovdqa %ymm15, 896(%rax)
5678 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 736(%rax)
5679 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5680 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 704(%rax)
5681 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5682 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 672(%rax)
5683 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5684 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 640(%rax)
5685 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5686 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 480(%rax)
5687 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5688 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 448(%rax)
5689 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5690 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 416(%rax)
5691 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5692 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
5693 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5694 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
5695 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5696 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 192(%rax)
5697 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5698 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 160(%rax)
5699 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5700 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
5701 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5702 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 864(%rax)
5703 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5704 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 832(%rax)
5705 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5706 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 800(%rax)
5707 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5708 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 768(%rax)
5709 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5710 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 608(%rax)
5711 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5712 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 576(%rax)
5713 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5714 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 544(%rax)
5715 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5716 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 512(%rax)
5717 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5718 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
5719 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5720 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
5721 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5722 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
5723 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5724 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
5725 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5726 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
5727 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5728 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
5729 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5730 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
5731 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5732 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
5733 ; AVX2-SLOW-NEXT: addq $744, %rsp # imm = 0x2E8
5734 ; AVX2-SLOW-NEXT: vzeroupper
5735 ; AVX2-SLOW-NEXT: retq
5737 ; AVX2-FAST-LABEL: store_i16_stride8_vf64:
5738 ; AVX2-FAST: # %bb.0:
5739 ; AVX2-FAST-NEXT: subq $776, %rsp # imm = 0x308
5740 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5741 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
5742 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm4
5743 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm5
5744 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5745 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,0,0,u,u,1,1>
5746 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm0, %ymm1
5747 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm6
5748 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm7
5749 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
5750 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,u,0,u,u,u,1,u>
5751 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm0, %ymm2
5752 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
5753 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm8
5754 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm9
5755 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
5756 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <0,0,1,1,1,1,u,u>
5757 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm0, %ymm12
5758 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm13
5759 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm1
5760 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm13[0],xmm1[1],xmm13[1],xmm1[2],xmm13[2],xmm1[3],xmm13[3]
5761 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <0,u,1,u,1,u,u,u>
5762 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm14, %ymm15
5763 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm15[0],ymm12[1],ymm15[2,3,4],ymm12[5],ymm15[6,7]
5764 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm12[0,1],ymm2[2,3],ymm12[4,5],ymm2[6,7]
5765 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5766 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <2,2,2,2,u,u,3,3>
5767 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm12, %ymm2
5768 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm12, %ymm3
5769 ; AVX2-FAST-NEXT: vmovdqa %ymm12, %ymm15
5770 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5771 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <2,2,3,3,3,3,u,u>
5772 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm10, %ymm3
5773 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm10, %ymm0
5774 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7]
5775 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
5776 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5777 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5778 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <0,0,0,0,u,u,1,1>
5779 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm11, %ymm2
5780 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
5781 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <0,u,0,u,u,u,1,u>
5782 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm12, %ymm4
5783 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
5784 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %xmm10
5785 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
5786 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <0,0,1,1,1,1,u,u>
5787 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm14, %ymm5
5788 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm13[4],xmm1[5],xmm13[5],xmm1[6],xmm13[6],xmm1[7],xmm13[7]
5789 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <0,u,1,u,1,u,u,u>
5790 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm4, %ymm6
5791 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5792 ; AVX2-FAST-NEXT: vmovdqa 32(%r10), %xmm6
5793 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5794 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5795 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm5
5796 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm15, %ymm0
5797 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm15, %ymm2
5798 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
5799 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm7
5800 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <2,2,3,3,3,3,u,u>
5801 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm3, %ymm2
5802 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
5803 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
5804 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5805 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5806 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm10[0],xmm6[1],xmm10[1],xmm6[2],xmm10[2],xmm6[3],xmm10[3]
5807 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm11, %ymm1
5808 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
5809 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm12, %ymm3
5810 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
5811 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm3
5812 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm8
5813 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm9
5814 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
5815 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
5816 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm14, %ymm15
5817 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
5818 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <0,u,1,u,1,u,u,u>
5819 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm12, %ymm14
5820 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm15[1],ymm14[2,3,4],ymm15[5],ymm14[6,7]
5821 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1],ymm1[2,3],ymm14[4,5],ymm1[6,7]
5822 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5823 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <2,2,2,2,u,u,3,3>
5824 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm14, %ymm1
5825 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm14, %ymm2
5826 ; AVX2-FAST-NEXT: vmovdqa %ymm14, %ymm15
5827 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
5828 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <2,2,3,3,3,3,u,u>
5829 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm4, %ymm2
5830 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm4, %ymm0
5831 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm13
5832 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7]
5833 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
5834 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5835 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm10[4],xmm6[5],xmm10[5],xmm6[6],xmm10[6],xmm6[7],xmm10[7]
5836 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
5837 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <0,0,0,0,u,u,1,1>
5838 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm7, %ymm2
5839 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <0,u,0,u,u,u,1,u>
5840 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm10, %ymm4
5841 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
5842 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
5843 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
5844 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <0,0,1,1,1,1,u,u>
5845 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm14, %ymm5
5846 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm6
5847 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5848 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5849 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5850 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm15, %ymm0
5851 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm15, %ymm1
5852 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5853 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm13, %ymm1
5854 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm13, %ymm2
5855 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5856 ; AVX2-FAST-NEXT: vmovdqa 64(%rax), %xmm2
5857 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5858 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5859 ; AVX2-FAST-NEXT: vmovdqa 64(%r10), %xmm0
5860 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %xmm1
5861 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %xmm3
5862 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
5863 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm7, %ymm5
5864 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
5865 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm10, %ymm7
5866 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3],ymm7[4,5,6],ymm5[7]
5867 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %xmm7
5868 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %xmm8
5869 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %xmm9
5870 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm10
5871 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
5872 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm14, %ymm13
5873 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
5874 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm12, %ymm15
5875 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm15[0],ymm13[1],ymm15[2,3,4],ymm13[5],ymm15[6,7]
5876 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm13[0,1],ymm5[2,3],ymm13[4,5],ymm5[6,7]
5877 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5878 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <2,2,2,2,u,u,3,3>
5879 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm13, %ymm4
5880 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm13, %ymm5
5881 ; AVX2-FAST-NEXT: vmovdqa %ymm13, %ymm15
5882 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
5883 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <2,2,3,3,3,3,u,u>
5884 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm12, %ymm5
5885 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm12, %ymm6
5886 ; AVX2-FAST-NEXT: vmovdqa %ymm12, %ymm14
5887 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5888 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
5889 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5890 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
5891 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5892 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <0,0,0,0,u,u,1,1>
5893 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm11, %ymm2
5894 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <0,u,0,u,u,u,1,u>
5895 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm13, %ymm3
5896 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5897 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
5898 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
5899 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <0,0,1,1,1,1,u,u>
5900 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm10, %ymm5
5901 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <0,u,1,u,1,u,u,u>
5902 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm6
5903 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5904 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5905 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5906 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm15, %ymm0
5907 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm15, %ymm1
5908 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5909 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm14, %ymm1
5910 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm14, %ymm2
5911 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5912 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5913 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5914 ; AVX2-FAST-NEXT: vmovdqa 96(%rax), %xmm0
5915 ; AVX2-FAST-NEXT: vmovdqa 96(%r10), %xmm1
5916 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5917 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm11, %ymm3
5918 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %xmm4
5919 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %xmm5
5920 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
5921 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm13, %ymm7
5922 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
5923 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %xmm7
5924 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %xmm8
5925 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
5926 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm10, %ymm10
5927 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %xmm11
5928 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm13
5929 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
5930 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm12, %ymm15
5931 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm15[0],ymm10[1],ymm15[2,3,4],ymm10[5],ymm15[6,7]
5932 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
5933 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5934 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <2,2,2,2,u,u,3,3>
5935 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm10, %ymm2
5936 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm10, %ymm3
5937 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5938 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <2,2,3,3,3,3,u,u>
5939 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm12, %ymm3
5940 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm12, %ymm6
5941 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3,4],ymm3[5],ymm6[6,7]
5942 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
5943 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5944 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
5945 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5946 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,0,0,u,u,1,1>
5947 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm2
5948 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <0,u,0,u,u,u,1,u>
5949 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
5950 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5951 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
5952 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
5953 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <0,0,1,1,1,1,u,u>
5954 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm5, %ymm5
5955 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <0,u,1,u,1,u,u,u>
5956 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm6, %ymm6
5957 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
5958 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
5959 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5960 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm10, %ymm0
5961 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm10, %ymm1
5962 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
5963 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm12, %ymm1
5964 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm2
5965 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
5966 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
5967 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5968 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm8
5969 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm7
5970 ; AVX2-FAST-NEXT: vmovdqa (%r10), %ymm10
5971 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm11
5972 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[8],ymm11[8],ymm10[9],ymm11[9],ymm10[10],ymm11[10],ymm10[11],ymm11[11]
5973 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,0,4,4,4,4,6,5]
5974 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm0, %ymm2
5975 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
5976 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,1,4,5,4,5,5,7]
5977 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm0, %ymm3
5978 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
5979 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm13
5980 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm14
5981 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm15
5982 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm2
5983 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm15[0],ymm2[0],ymm15[1],ymm2[1],ymm15[2],ymm2[2],ymm15[3],ymm2[3],ymm15[8],ymm2[8],ymm15[9],ymm2[9],ymm15[10],ymm2[10],ymm15[11],ymm2[11]
5984 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [4,4,2,1,6,5,6,5]
5985 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm5
5986 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[8],ymm14[8],ymm13[9],ymm14[9],ymm13[10],ymm14[10],ymm13[11],ymm14[11]
5987 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [4,5,1,3,5,7,5,7]
5988 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm9
5989 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0],ymm5[1],ymm9[2,3,4],ymm5[5],ymm9[6,7]
5990 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1],ymm6[2,3],ymm5[4,5],ymm6[6,7]
5991 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5992 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,2,4,6,4,6,6,7]
5993 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm3, %ymm4
5994 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [2,1,6,5,6,5,7,7]
5995 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm3, %ymm9
5996 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2],ymm4[3],ymm9[4,5,6],ymm4[7]
5997 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [4,6,2,3,6,7,6,7]
5998 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm5, %ymm9
5999 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [6,5,3,3,7,7,7,7]
6000 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
6001 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm9[1],ymm0[2,3,4],ymm9[5],ymm0[6,7]
6002 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm4[2,3],ymm0[4,5],ymm4[6,7]
6003 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6004 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm10[4],ymm11[4],ymm10[5],ymm11[5],ymm10[6],ymm11[6],ymm10[7],ymm11[7],ymm10[12],ymm11[12],ymm10[13],ymm11[13],ymm10[14],ymm11[14],ymm10[15],ymm11[15]
6005 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
6006 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,0,4,4,4,4,6,5]
6007 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm11, %ymm6
6008 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
6009 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm8, %ymm7
6010 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2],ymm6[3],ymm7[4,5,6],ymm6[7]
6011 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm15[4],ymm2[4],ymm15[5],ymm2[5],ymm15[6],ymm2[6],ymm15[7],ymm2[7],ymm15[12],ymm2[12],ymm15[13],ymm2[13],ymm15[14],ymm2[14],ymm15[15],ymm2[15]
6012 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm7 = ymm13[4],ymm14[4],ymm13[5],ymm14[5],ymm13[6],ymm14[6],ymm13[7],ymm14[7],ymm13[12],ymm14[12],ymm13[13],ymm14[13],ymm13[14],ymm14[14],ymm13[15],ymm14[15]
6013 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [4,4,2,1,6,5,6,5]
6014 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm12, %ymm9
6015 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,5,1,3,5,7,5,7]
6016 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm1, %ymm10
6017 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
6018 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0,1],ymm6[2,3],ymm9[4,5],ymm6[6,7]
6019 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6020 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,4,6,4,6,6,7]
6021 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
6022 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [2,1,6,5,6,5,7,7]
6023 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm4
6024 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
6025 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm5, %ymm2
6026 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm3, %ymm4
6027 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3,4],ymm2[5],ymm4[6,7]
6028 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3],ymm2[4,5],ymm0[6,7]
6029 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6030 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm5
6031 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm3
6032 ; AVX2-FAST-NEXT: vmovdqa 32(%r10), %ymm4
6033 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %ymm6
6034 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[8],ymm6[8],ymm4[9],ymm6[9],ymm4[10],ymm6[10],ymm4[11],ymm6[11]
6035 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm11, %ymm9
6036 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[8],ymm3[8],ymm5[9],ymm3[9],ymm5[10],ymm3[10],ymm5[11],ymm3[11]
6037 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm8, %ymm11
6038 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2],ymm9[3],ymm11[4,5,6],ymm9[7]
6039 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm11
6040 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm13
6041 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm15
6042 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm2
6043 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm15[0],ymm2[0],ymm15[1],ymm2[1],ymm15[2],ymm2[2],ymm15[3],ymm2[3],ymm15[8],ymm2[8],ymm15[9],ymm2[9],ymm15[10],ymm2[10],ymm15[11],ymm2[11]
6044 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm12, %ymm1
6045 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm11[0],ymm13[0],ymm11[1],ymm13[1],ymm11[2],ymm13[2],ymm11[3],ymm13[3],ymm11[8],ymm13[8],ymm11[9],ymm13[9],ymm11[10],ymm13[10],ymm11[11],ymm13[11]
6046 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [4,5,1,3,5,7,5,7]
6047 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm12
6048 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm12[0],ymm1[1],ymm12[2,3,4],ymm1[5],ymm12[6,7]
6049 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm9[2,3],ymm1[4,5],ymm9[6,7]
6050 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6051 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,2,4,6,4,6,6,7]
6052 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm8, %ymm1
6053 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [2,1,6,5,6,5,7,7]
6054 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm9, %ymm7
6055 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm7[0,1,2],ymm1[3],ymm7[4,5,6],ymm1[7]
6056 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [4,6,2,3,6,7,6,7]
6057 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm10, %ymm7
6058 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [6,5,3,3,7,7,7,7]
6059 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm12, %ymm0
6060 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2,3,4],ymm7[5],ymm0[6,7]
6061 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
6062 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6063 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm4[4],ymm6[4],ymm4[5],ymm6[5],ymm4[6],ymm6[6],ymm4[7],ymm6[7],ymm4[12],ymm6[12],ymm4[13],ymm6[13],ymm4[14],ymm6[14],ymm4[15],ymm6[15]
6064 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm5[4],ymm3[4],ymm5[5],ymm3[5],ymm5[6],ymm3[6],ymm5[7],ymm3[7],ymm5[12],ymm3[12],ymm5[13],ymm3[13],ymm5[14],ymm3[14],ymm5[15],ymm3[15]
6065 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,0,4,4,4,4,6,5]
6066 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm7, %ymm3
6067 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,4,5,4,5,5,7]
6068 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm14, %ymm4
6069 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
6070 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm15[4],ymm2[4],ymm15[5],ymm2[5],ymm15[6],ymm2[6],ymm15[7],ymm2[7],ymm15[12],ymm2[12],ymm15[13],ymm2[13],ymm15[14],ymm2[14],ymm15[15],ymm2[15]
6071 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
6072 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [4,4,2,1,6,5,6,5]
6073 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm13, %ymm5
6074 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [4,5,1,3,5,7,5,7]
6075 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm6, %ymm6
6076 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
6077 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2,3],ymm5[4,5],ymm3[6,7]
6078 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6079 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm8, %ymm0
6080 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm9, %ymm1
6081 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
6082 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm10, %ymm1
6083 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm12, %ymm2
6084 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
6085 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6086 ; AVX2-FAST-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
6087 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %ymm9
6088 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %ymm8
6089 ; AVX2-FAST-NEXT: vmovdqa 64(%r10), %ymm3
6090 ; AVX2-FAST-NEXT: vmovdqa 64(%rax), %ymm4
6091 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
6092 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm7, %ymm6
6093 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[8],ymm8[8],ymm9[9],ymm8[9],ymm9[10],ymm8[10],ymm9[11],ymm8[11]
6094 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm14, %ymm10
6095 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm10[0,1,2],ymm6[3],ymm10[4,5,6],ymm6[7]
6096 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %ymm12
6097 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %ymm15
6098 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %ymm11
6099 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %ymm10
6100 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[8],ymm10[8],ymm11[9],ymm10[9],ymm11[10],ymm10[10],ymm11[11],ymm10[11]
6101 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm13, %ymm2
6102 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm12[0],ymm15[0],ymm12[1],ymm15[1],ymm12[2],ymm15[2],ymm12[3],ymm15[3],ymm12[8],ymm15[8],ymm12[9],ymm15[9],ymm12[10],ymm15[10],ymm12[11],ymm15[11]
6103 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [4,5,1,3,5,7,5,7]
6104 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm13, %ymm14
6105 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm14[0],ymm2[1],ymm14[2,3,4],ymm2[5],ymm14[6,7]
6106 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm6[2,3],ymm2[4,5],ymm6[6,7]
6107 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6108 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,2,4,6,4,6,6,7]
6109 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm6, %ymm2
6110 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [2,1,6,5,6,5,7,7]
6111 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm14, %ymm5
6112 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1,2],ymm2[3],ymm5[4,5,6],ymm2[7]
6113 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [4,6,2,3,6,7,6,7]
6114 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm5, %ymm1
6115 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [6,5,3,3,7,7,7,7]
6116 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm7, %ymm0
6117 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
6118 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
6119 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6120 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
6121 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm9[4],ymm8[4],ymm9[5],ymm8[5],ymm9[6],ymm8[6],ymm9[7],ymm8[7],ymm9[12],ymm8[12],ymm9[13],ymm8[13],ymm9[14],ymm8[14],ymm9[15],ymm8[15]
6122 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,0,4,4,4,4,6,5]
6123 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm9, %ymm2
6124 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,4,5,4,5,5,7]
6125 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
6126 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
6127 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm11[4],ymm10[4],ymm11[5],ymm10[5],ymm11[6],ymm10[6],ymm11[7],ymm10[7],ymm11[12],ymm10[12],ymm11[13],ymm10[13],ymm11[14],ymm10[14],ymm11[15],ymm10[15]
6128 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm12[4],ymm15[4],ymm12[5],ymm15[5],ymm12[6],ymm15[6],ymm12[7],ymm15[7],ymm12[12],ymm15[12],ymm12[13],ymm15[13],ymm12[14],ymm15[14],ymm12[15],ymm15[15]
6129 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [4,4,2,1,6,5,6,5]
6130 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm7, %ymm7
6131 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm13, %ymm8
6132 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3,4],ymm7[5],ymm8[6,7]
6133 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm7[0,1],ymm2[2,3],ymm7[4,5],ymm2[6,7]
6134 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6135 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm6, %ymm0
6136 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm14, %ymm1
6137 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
6138 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm5, %ymm1
6139 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [6,5,3,3,7,7,7,7]
6140 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm2, %ymm2
6141 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
6142 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6143 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6144 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %ymm6
6145 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %ymm5
6146 ; AVX2-FAST-NEXT: vmovdqa 96(%r10), %ymm3
6147 ; AVX2-FAST-NEXT: vmovdqa 96(%rax), %ymm4
6148 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
6149 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm9, %ymm9
6150 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
6151 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,1,4,5,4,5,5,7]
6152 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm0, %ymm11
6153 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2],ymm9[3],ymm11[4,5,6],ymm9[7]
6154 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %ymm11
6155 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %ymm12
6156 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %ymm14
6157 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %ymm15
6158 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm14[0],ymm15[0],ymm14[1],ymm15[1],ymm14[2],ymm15[2],ymm14[3],ymm15[3],ymm14[8],ymm15[8],ymm14[9],ymm15[9],ymm14[10],ymm15[10],ymm14[11],ymm15[11]
6159 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [4,4,2,1,6,5,6,5]
6160 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm2
6161 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11]
6162 ; AVX2-FAST-NEXT: vmovdqa %ymm13, %ymm7
6163 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm13, %ymm13
6164 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm13[0],ymm2[1],ymm13[2,3,4],ymm2[5],ymm13[6,7]
6165 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm9[2,3],ymm2[4,5],ymm9[6,7]
6166 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,2,4,6,4,6,6,7]
6167 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm9, %ymm8
6168 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [2,1,6,5,6,5,7,7]
6169 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm13, %ymm9
6170 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
6171 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [4,6,2,3,6,7,6,7]
6172 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm9, %ymm1
6173 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [6,5,3,3,7,7,7,7]
6174 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm10, %ymm0
6175 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7]
6176 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm8[2,3],ymm0[4,5],ymm8[6,7]
6177 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
6178 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
6179 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,0,4,4,4,4,6,5]
6180 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm4, %ymm4
6181 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,4,5,4,5,5,7]
6182 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm5, %ymm5
6183 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
6184 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm14[4],ymm15[4],ymm14[5],ymm15[5],ymm14[6],ymm15[6],ymm14[7],ymm15[7],ymm14[12],ymm15[12],ymm14[13],ymm15[13],ymm14[14],ymm15[14],ymm14[15],ymm15[15]
6185 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
6186 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [4,4,2,1,6,5,6,5]
6187 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm8, %ymm8
6188 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm7, %ymm9
6189 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
6190 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1],ymm4[2,3],ymm8[4,5],ymm4[6,7]
6191 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,2,4,6,4,6,6,7]
6192 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm7, %ymm1
6193 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm13, %ymm3
6194 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7]
6195 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [4,6,2,3,6,7,6,7]
6196 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm3, %ymm3
6197 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm10, %ymm5
6198 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7]
6199 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1],ymm1[2,3],ymm3[4,5],ymm1[6,7]
6200 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6201 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 992(%rax)
6202 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 960(%rax)
6203 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 928(%rax)
6204 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 896(%rax)
6205 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6206 ; AVX2-FAST-NEXT: vmovaps %ymm0, 736(%rax)
6207 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6208 ; AVX2-FAST-NEXT: vmovaps %ymm0, 704(%rax)
6209 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6210 ; AVX2-FAST-NEXT: vmovaps %ymm0, 672(%rax)
6211 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6212 ; AVX2-FAST-NEXT: vmovaps %ymm0, 640(%rax)
6213 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
6214 ; AVX2-FAST-NEXT: vmovaps %ymm0, 480(%rax)
6215 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6216 ; AVX2-FAST-NEXT: vmovaps %ymm0, 448(%rax)
6217 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6218 ; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
6219 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6220 ; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
6221 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6222 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
6223 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6224 ; AVX2-FAST-NEXT: vmovaps %ymm0, 192(%rax)
6225 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6226 ; AVX2-FAST-NEXT: vmovaps %ymm0, 160(%rax)
6227 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6228 ; AVX2-FAST-NEXT: vmovaps %ymm0, 128(%rax)
6229 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6230 ; AVX2-FAST-NEXT: vmovaps %ymm0, 864(%rax)
6231 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6232 ; AVX2-FAST-NEXT: vmovaps %ymm0, 832(%rax)
6233 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6234 ; AVX2-FAST-NEXT: vmovaps %ymm0, 800(%rax)
6235 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6236 ; AVX2-FAST-NEXT: vmovaps %ymm0, 768(%rax)
6237 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6238 ; AVX2-FAST-NEXT: vmovaps %ymm0, 608(%rax)
6239 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6240 ; AVX2-FAST-NEXT: vmovaps %ymm0, 576(%rax)
6241 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6242 ; AVX2-FAST-NEXT: vmovaps %ymm0, 544(%rax)
6243 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6244 ; AVX2-FAST-NEXT: vmovaps %ymm0, 512(%rax)
6245 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6246 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
6247 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6248 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
6249 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6250 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%rax)
6251 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6252 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
6253 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6254 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%rax)
6255 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6256 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
6257 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6258 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
6259 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6260 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
6261 ; AVX2-FAST-NEXT: addq $776, %rsp # imm = 0x308
6262 ; AVX2-FAST-NEXT: vzeroupper
6263 ; AVX2-FAST-NEXT: retq
6265 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride8_vf64:
6266 ; AVX2-FAST-PERLANE: # %bb.0:
6267 ; AVX2-FAST-PERLANE-NEXT: subq $744, %rsp # imm = 0x2E8
6268 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
6269 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
6270 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm6
6271 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %xmm0
6272 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm7
6273 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
6274 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
6275 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6276 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm8
6277 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm9
6278 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6279 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero
6280 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6281 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7]
6282 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm10
6283 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm11
6284 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
6285 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[0,0,1,1]
6286 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
6287 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm12
6288 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm13
6289 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
6290 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm14[0],zero,xmm14[1],zero
6291 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,1,3]
6292 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm15 = ymm15[0],ymm2[1],ymm15[2,3,4],ymm2[5],ymm15[6,7]
6293 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r10), %xmm2
6294 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm15[0,1],ymm1[2,3],ymm15[4,5],ymm1[6,7]
6295 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6296 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm1
6297 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
6298 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6299 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
6300 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6301 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm15 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7]
6302 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm3
6303 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,2,3,3]
6304 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
6305 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm14[2,2,3,3]
6306 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
6307 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
6308 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm4
6309 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm15[2,3],ymm5[4,5],ymm15[6,7]
6310 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6311 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm5
6312 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
6313 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
6314 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
6315 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
6316 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm8[0],zero,xmm8[1],zero
6317 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
6318 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm7[3],ymm9[4,5,6],ymm7[7]
6319 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm7
6320 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
6321 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[0,0,1,1]
6322 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,1,3]
6323 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
6324 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm12[0],zero,xmm12[1],zero
6325 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,1,3]
6326 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm13[0],ymm11[1],ymm13[2,3,4],ymm11[5],ymm13[6,7]
6327 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1],ymm9[2,3],ymm11[4,5],ymm9[6,7]
6328 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6329 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
6330 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
6331 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
6332 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
6333 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
6334 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[2,2,3,3]
6335 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,1,3]
6336 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm12[2,2,3,3]
6337 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
6338 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7]
6339 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1],ymm6[2,3],ymm8[4,5],ymm6[6,7]
6340 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6341 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
6342 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
6343 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
6344 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
6345 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm10[0],zero,xmm10[1],zero
6346 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
6347 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
6348 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6349 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[0,0,1,1]
6350 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm9[0,1,1,3]
6351 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm9
6352 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
6353 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
6354 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
6355 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
6356 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm12[0,1],ymm8[2,3],ymm12[4,5],ymm8[6,7]
6357 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6358 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
6359 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
6360 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[2,2,3,3]
6361 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
6362 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7]
6363 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[2,2,3,3]
6364 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
6365 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
6366 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
6367 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm6[1],ymm10[2,3,4],ymm6[5],ymm10[6,7]
6368 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rax), %xmm6
6369 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1],ymm8[2,3],ymm10[4,5],ymm8[6,7]
6370 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6371 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r10), %xmm8
6372 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
6373 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm2[0,0,1,1]
6374 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6375 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
6376 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
6377 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6378 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
6379 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %xmm0
6380 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6381 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
6382 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
6383 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
6384 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
6385 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,1,3]
6386 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm1[1],ymm7[2,3,4],ymm1[5],ymm7[6,7]
6387 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %xmm1
6388 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1],ymm10[2,3],ymm7[4,5],ymm10[6,7]
6389 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6390 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
6391 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6392 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
6393 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6394 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
6395 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[2,2,3,3]
6396 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
6397 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,2,3,3]
6398 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
6399 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4],ymm3[5],ymm4[6,7]
6400 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
6401 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6402 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
6403 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,0,1,1]
6404 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6405 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6406 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm9[0],zero,xmm9[1],zero
6407 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6408 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
6409 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %xmm2
6410 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %xmm3
6411 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6412 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
6413 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm4[0,1,1,3]
6414 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %xmm4
6415 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm5
6416 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6417 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
6418 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
6419 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
6420 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
6421 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6422 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
6423 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
6424 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
6425 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
6426 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2],ymm7[3],ymm9[4,5,6],ymm7[7]
6427 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[2,2,3,3]
6428 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
6429 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
6430 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
6431 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
6432 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1],ymm7[2,3],ymm9[4,5],ymm7[6,7]
6433 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6434 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
6435 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6436 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[0,0,1,1]
6437 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6438 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm0[0],zero,xmm0[1],zero
6439 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
6440 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm7[0,1,2],ymm1[3],ymm7[4,5,6],ymm1[7]
6441 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
6442 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6443 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[0,0,1,1]
6444 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
6445 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm3[0],zero,xmm3[1],zero
6446 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
6447 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
6448 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1],ymm1[2,3],ymm4[4,5],ymm1[6,7]
6449 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6450 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[2,2,3,3]
6451 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6452 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
6453 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6454 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6],ymm1[7]
6455 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
6456 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
6457 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
6458 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
6459 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
6460 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6461 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6462 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rax), %xmm0
6463 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r10), %xmm1
6464 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6465 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,1,1]
6466 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,0,2,1]
6467 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %xmm2
6468 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %xmm4
6469 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
6470 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm9[0],zero,xmm9[1],zero
6471 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
6472 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7]
6473 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %xmm3
6474 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %xmm5
6475 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
6476 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[0,0,1,1]
6477 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm6[0,1,1,3]
6478 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %xmm6
6479 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm7
6480 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
6481 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
6482 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,1,3]
6483 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3,4],ymm12[5],ymm14[6,7]
6484 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1],ymm10[2,3],ymm12[4,5],ymm10[6,7]
6485 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6486 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
6487 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
6488 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
6489 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
6490 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7]
6491 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[2,2,3,3]
6492 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
6493 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
6494 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
6495 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3,4],ymm9[5],ymm10[6,7]
6496 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1],ymm8[2,3],ymm9[4,5],ymm8[6,7]
6497 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6498 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6499 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
6500 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
6501 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6502 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm1[0],zero,xmm1[1],zero
6503 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6504 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
6505 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
6506 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
6507 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[0,0,1,1]
6508 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
6509 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
6510 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,1,3]
6511 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
6512 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
6513 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6514 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
6515 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6516 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
6517 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6518 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
6519 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,2,3,3]
6520 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
6521 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
6522 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
6523 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
6524 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6525 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6526 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %ymm0
6527 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm1
6528 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
6529 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
6530 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6531 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm4
6532 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm5
6533 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
6534 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
6535 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
6536 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
6537 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm7
6538 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm8
6539 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
6540 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm10 = ymm9[0,0,2,1,4,4,6,5]
6541 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
6542 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm11
6543 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm12
6544 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11]
6545 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm13[0,1,1,3,4,5,5,7]
6546 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
6547 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm14[0],ymm10[1],ymm14[2,3,4],ymm10[5],ymm14[6,7]
6548 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm10[0,1],ymm3[2,3],ymm10[4,5],ymm3[6,7]
6549 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6550 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
6551 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6552 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[2,1,3,3,6,5,7,7]
6553 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6554 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
6555 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[0,2,2,3,4,6,6,7]
6556 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
6557 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm13[2,1,3,3,6,5,7,7]
6558 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
6559 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3,4],ymm3[5],ymm6[6,7]
6560 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
6561 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6562 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15]
6563 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
6564 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm0[0,0,2,1,4,4,6,5]
6565 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6566 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,1,1,3,4,5,5,7]
6567 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6568 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
6569 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
6570 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
6571 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm3[0,0,2,1,4,4,6,5]
6572 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
6573 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm4[0,1,1,3,4,5,5,7]
6574 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
6575 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7]
6576 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2,3],ymm5[4,5],ymm2[6,7]
6577 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6578 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
6579 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
6580 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
6581 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
6582 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7]
6583 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[0,2,2,3,4,6,6,7]
6584 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
6585 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,3,3,6,5,7,7]
6586 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
6587 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7]
6588 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6589 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6590 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r10), %ymm13
6591 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %ymm11
6592 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm13[0],ymm11[0],ymm13[1],ymm11[1],ymm13[2],ymm11[2],ymm13[3],ymm11[3],ymm13[8],ymm11[8],ymm13[9],ymm11[9],ymm13[10],ymm11[10],ymm13[11],ymm11[11]
6593 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
6594 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6595 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm4
6596 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm5
6597 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
6598 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
6599 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
6600 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
6601 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm7
6602 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm8
6603 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
6604 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm10 = ymm9[0,0,2,1,4,4,6,5]
6605 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
6606 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm1
6607 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm3
6608 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
6609 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm0[0,1,1,3,4,5,5,7]
6610 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
6611 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm15[0],ymm10[1],ymm15[2,3,4],ymm10[5],ymm15[6,7]
6612 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1],ymm12[2,3],ymm10[4,5],ymm12[6,7]
6613 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6614 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
6615 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6616 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,1,3,3,6,5,7,7]
6617 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
6618 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2],ymm2[3],ymm6[4,5,6],ymm2[7]
6619 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm9[0,2,2,3,4,6,6,7]
6620 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
6621 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
6622 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
6623 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3,4],ymm6[5],ymm0[6,7]
6624 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
6625 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6626 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm13[4],ymm11[4],ymm13[5],ymm11[5],ymm13[6],ymm11[6],ymm13[7],ymm11[7],ymm13[12],ymm11[12],ymm13[13],ymm11[13],ymm13[14],ymm11[14],ymm13[15],ymm11[15]
6627 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
6628 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[0,0,2,1,4,4,6,5]
6629 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
6630 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm2[0,1,1,3,4,5,5,7]
6631 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
6632 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
6633 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
6634 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[12],ymm3[12],ymm1[13],ymm3[13],ymm1[14],ymm3[14],ymm1[15],ymm3[15]
6635 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[0,0,2,1,4,4,6,5]
6636 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
6637 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm1[0,1,1,3,4,5,5,7]
6638 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,3]
6639 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3,4],ymm3[5],ymm6[6,7]
6640 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
6641 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, (%rsp) # 32-byte Spill
6642 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
6643 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
6644 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
6645 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6646 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
6647 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[0,2,2,3,4,6,6,7]
6648 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
6649 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
6650 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
6651 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
6652 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6653 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6654 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r10), %ymm10
6655 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rax), %ymm9
6656 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm10[0],ymm9[0],ymm10[1],ymm9[1],ymm10[2],ymm9[2],ymm10[3],ymm9[3],ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11]
6657 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
6658 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6659 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %ymm8
6660 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %ymm5
6661 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm8[0],ymm5[0],ymm8[1],ymm5[1],ymm8[2],ymm5[2],ymm8[3],ymm5[3],ymm8[8],ymm5[8],ymm8[9],ymm5[9],ymm8[10],ymm5[10],ymm8[11],ymm5[11]
6662 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,5,7]
6663 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
6664 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1,2],ymm3[3],ymm7[4,5,6],ymm3[7]
6665 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %ymm11
6666 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %ymm13
6667 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm11[0],ymm13[0],ymm11[1],ymm13[1],ymm11[2],ymm13[2],ymm11[3],ymm13[3],ymm11[8],ymm13[8],ymm11[9],ymm13[9],ymm11[10],ymm13[10],ymm11[11],ymm13[11]
6668 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm15[0,0,2,1,4,4,6,5]
6669 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
6670 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %ymm3
6671 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %ymm1
6672 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
6673 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm0[0,1,1,3,4,5,5,7]
6674 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
6675 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm14[0],ymm7[1],ymm14[2,3,4],ymm7[5],ymm14[6,7]
6676 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1],ymm4[2,3],ymm7[4,5],ymm4[6,7]
6677 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6678 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
6679 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6680 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[2,1,3,3,6,5,7,7]
6681 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
6682 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
6683 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm15[0,2,2,3,4,6,6,7]
6684 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
6685 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
6686 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
6687 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
6688 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
6689 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6690 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm10[4],ymm9[4],ymm10[5],ymm9[5],ymm10[6],ymm9[6],ymm10[7],ymm9[7],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15]
6691 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm8[4],ymm5[4],ymm8[5],ymm5[5],ymm8[6],ymm5[6],ymm8[7],ymm5[7],ymm8[12],ymm5[12],ymm8[13],ymm5[13],ymm8[14],ymm5[14],ymm8[15],ymm5[15]
6692 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm0[0,0,2,1,4,4,6,5]
6693 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
6694 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm2[0,1,1,3,4,5,5,7]
6695 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
6696 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7]
6697 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
6698 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15]
6699 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm8[0,0,2,1,4,4,6,5]
6700 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
6701 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm1[0,1,1,3,4,5,5,7]
6702 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
6703 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7]
6704 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7]
6705 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6706 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
6707 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
6708 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
6709 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6710 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7]
6711 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[0,2,2,3,4,6,6,7]
6712 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
6713 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
6714 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
6715 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7]
6716 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6717 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r10), %ymm6
6718 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rax), %ymm5
6719 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
6720 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm2[0,0,2,1,4,4,6,5]
6721 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
6722 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %ymm8
6723 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %ymm9
6724 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[2],ymm9[2],ymm8[3],ymm9[3],ymm8[8],ymm9[8],ymm8[9],ymm9[9],ymm8[10],ymm9[10],ymm8[11],ymm9[11]
6725 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm10[0,1,1,3,4,5,5,7]
6726 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
6727 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm11[0,1,2],ymm3[3],ymm11[4,5,6],ymm3[7]
6728 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %ymm11
6729 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %ymm13
6730 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm11[0],ymm13[0],ymm11[1],ymm13[1],ymm11[2],ymm13[2],ymm11[3],ymm13[3],ymm11[8],ymm13[8],ymm11[9],ymm13[9],ymm11[10],ymm13[10],ymm11[11],ymm13[11]
6731 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm14[0,0,2,1,4,4,6,5]
6732 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,3]
6733 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %ymm3
6734 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %ymm1
6735 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
6736 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm12 = ymm0[0,1,1,3,4,5,5,7]
6737 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,3]
6738 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0],ymm15[1],ymm12[2,3,4],ymm15[5],ymm12[6,7]
6739 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm15 = ymm12[0,1],ymm4[2,3],ymm12[4,5],ymm4[6,7]
6740 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
6741 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
6742 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm10[2,1,3,3,6,5,7,7]
6743 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
6744 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7]
6745 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[0,2,2,3,4,6,6,7]
6746 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
6747 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
6748 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
6749 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7]
6750 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2,3],ymm0[4,5],ymm2[6,7]
6751 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
6752 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm8[4],ymm9[4],ymm8[5],ymm9[5],ymm8[6],ymm9[6],ymm8[7],ymm9[7],ymm8[12],ymm9[12],ymm8[13],ymm9[13],ymm8[14],ymm9[14],ymm8[15],ymm9[15]
6753 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[0,0,2,1,4,4,6,5]
6754 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
6755 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm4[0,1,1,3,4,5,5,7]
6756 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
6757 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3],ymm6[4,5,6],ymm5[7]
6758 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
6759 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15]
6760 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
6761 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
6762 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm1[0,1,1,3,4,5,5,7]
6763 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
6764 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm8[0],ymm3[1],ymm8[2,3,4],ymm3[5],ymm8[6,7]
6765 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm5[2,3],ymm3[4,5],ymm5[6,7]
6766 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
6767 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
6768 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
6769 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
6770 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7]
6771 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[0,2,2,3,4,6,6,7]
6772 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
6773 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
6774 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
6775 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2,3,4],ymm4[5],ymm1[6,7]
6776 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
6777 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
6778 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 992(%rax)
6779 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 960(%rax)
6780 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, 928(%rax)
6781 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 896(%rax)
6782 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, 736(%rax)
6783 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6784 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 704(%rax)
6785 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6786 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 672(%rax)
6787 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6788 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 640(%rax)
6789 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6790 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 480(%rax)
6791 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
6792 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 448(%rax)
6793 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6794 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 416(%rax)
6795 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6796 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
6797 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6798 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
6799 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6800 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 192(%rax)
6801 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6802 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 160(%rax)
6803 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6804 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 128(%rax)
6805 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6806 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 864(%rax)
6807 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6808 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 832(%rax)
6809 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6810 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 800(%rax)
6811 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6812 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 768(%rax)
6813 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6814 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 608(%rax)
6815 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6816 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 576(%rax)
6817 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6818 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 544(%rax)
6819 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6820 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 512(%rax)
6821 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6822 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
6823 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6824 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
6825 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6826 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
6827 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6828 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
6829 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6830 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%rax)
6831 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6832 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
6833 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6834 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
6835 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6836 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
6837 ; AVX2-FAST-PERLANE-NEXT: addq $744, %rsp # imm = 0x2E8
6838 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
6839 ; AVX2-FAST-PERLANE-NEXT: retq
6841 ; AVX512F-SLOW-LABEL: store_i16_stride8_vf64:
6842 ; AVX512F-SLOW: # %bb.0:
6843 ; AVX512F-SLOW-NEXT: subq $504, %rsp # imm = 0x1F8
6844 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6845 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
6846 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %xmm0
6847 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6848 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r10), %xmm4
6849 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %xmm1
6850 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6851 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rax), %xmm3
6852 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6853 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm0
6854 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6855 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm7
6856 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm0[4],xmm7[5],xmm0[5],xmm7[6],xmm0[6],xmm7[7],xmm0[7]
6857 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <0,u,0,u,u,u,1,u,2,2,2,2,u,u,3,3>
6858 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm30, %zmm0
6859 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <0,0,0,0,u,u,1,1,2,2,2,2,u,u,3,3>
6860 ; AVX512F-SLOW-NEXT: movw $-30584, %r11w # imm = 0x8888
6861 ; AVX512F-SLOW-NEXT: kmovw %r11d, %k2
6862 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm29, %zmm0 {%k2}
6863 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6864 ; AVX512F-SLOW-NEXT: movw $8738, %r11w # imm = 0x2222
6865 ; AVX512F-SLOW-NEXT: kmovw %r11d, %k1
6866 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r10), %ymm2
6867 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rax), %ymm5
6868 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm5[0],ymm2[0],ymm5[1],ymm2[1],ymm5[2],ymm2[2],ymm5[3],ymm2[3],ymm5[8],ymm2[8],ymm5[9],ymm2[9],ymm5[10],ymm2[10],ymm5[11],ymm2[11]
6869 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r9), %ymm8
6870 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r8), %ymm9
6871 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[8],ymm8[8],ymm9[9],ymm8[9],ymm9[10],ymm8[10],ymm9[11],ymm8[11]
6872 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = [0,1,4,5,4,5,5,7,2,1,6,5,6,5,7,7]
6873 ; AVX512F-SLOW-NEXT: vpermd %zmm10, %zmm19, %zmm0
6874 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm18 = [0,0,4,4,4,4,6,5,0,2,4,6,4,6,6,7]
6875 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm18, %zmm0 {%k2}
6876 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6877 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rcx), %ymm10
6878 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdx), %ymm11
6879 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rsi), %ymm12
6880 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdi), %ymm13
6881 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm13[0],ymm12[0],ymm13[1],ymm12[1],ymm13[2],ymm12[2],ymm13[3],ymm12[3],ymm13[8],ymm12[8],ymm13[9],ymm12[9],ymm13[10],ymm12[10],ymm13[11],ymm12[11]
6882 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm16 = [4,5,1,3,5,7,5,7,6,5,3,3,7,7,7,7]
6883 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm16, %zmm0
6884 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[8],ymm10[8],ymm11[9],ymm10[9],ymm11[10],ymm10[10],ymm11[11],ymm10[11]
6885 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm17 = [4,4,2,1,6,5,6,5,4,6,2,3,6,7,6,7]
6886 ; AVX512F-SLOW-NEXT: vpermd %zmm14, %zmm17, %zmm0 {%k1}
6887 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6888 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm5[4],ymm2[4],ymm5[5],ymm2[5],ymm5[6],ymm2[6],ymm5[7],ymm2[7],ymm5[12],ymm2[12],ymm5[13],ymm2[13],ymm5[14],ymm2[14],ymm5[15],ymm2[15]
6889 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm9[4],ymm8[4],ymm9[5],ymm8[5],ymm9[6],ymm8[6],ymm9[7],ymm8[7],ymm9[12],ymm8[12],ymm9[13],ymm8[13],ymm9[14],ymm8[14],ymm9[15],ymm8[15]
6890 ; AVX512F-SLOW-NEXT: vpermd %zmm5, %zmm19, %zmm0
6891 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm18, %zmm0 {%k2}
6892 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6893 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm11[4],ymm10[4],ymm11[5],ymm10[5],ymm11[6],ymm10[6],ymm11[7],ymm10[7],ymm11[12],ymm10[12],ymm11[13],ymm10[13],ymm11[14],ymm10[14],ymm11[15],ymm10[15]
6894 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm13[4],ymm12[4],ymm13[5],ymm12[5],ymm13[6],ymm12[6],ymm13[7],ymm12[7],ymm13[12],ymm12[12],ymm13[13],ymm12[13],ymm13[14],ymm12[14],ymm13[15],ymm12[15]
6895 ; AVX512F-SLOW-NEXT: vpermd %zmm5, %zmm16, %zmm31
6896 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm17, %zmm31 {%k1}
6897 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r10), %xmm2
6898 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rax), %xmm8
6899 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm2[0],xmm8[1],xmm2[1],xmm8[2],xmm2[2],xmm8[3],xmm2[3]
6900 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r9), %xmm10
6901 ; AVX512F-SLOW-NEXT: vmovdqa 96(%r8), %xmm11
6902 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
6903 ; AVX512F-SLOW-NEXT: vpermd %zmm12, %zmm30, %zmm0
6904 ; AVX512F-SLOW-NEXT: vpermd %zmm9, %zmm29, %zmm0 {%k2}
6905 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6906 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm8[4],xmm2[4],xmm8[5],xmm2[5],xmm8[6],xmm2[6],xmm8[7],xmm2[7]
6907 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
6908 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm30, %zmm0
6909 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm29, %zmm0 {%k2}
6910 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6911 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r10), %ymm2
6912 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rax), %ymm9
6913 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm9[0],ymm2[0],ymm9[1],ymm2[1],ymm9[2],ymm2[2],ymm9[3],ymm2[3],ymm9[8],ymm2[8],ymm9[9],ymm2[9],ymm9[10],ymm2[10],ymm9[11],ymm2[11]
6914 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r9), %ymm10
6915 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r8), %ymm11
6916 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[8],ymm10[8],ymm11[9],ymm10[9],ymm11[10],ymm10[10],ymm11[11],ymm10[11]
6917 ; AVX512F-SLOW-NEXT: vpermd %zmm12, %zmm19, %zmm0
6918 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm18, %zmm0 {%k2}
6919 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6920 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rcx), %ymm12
6921 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdx), %ymm15
6922 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rsi), %ymm1
6923 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdi), %ymm0
6924 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
6925 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm16, %zmm26
6926 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm15[0],ymm12[0],ymm15[1],ymm12[1],ymm15[2],ymm12[2],ymm15[3],ymm12[3],ymm15[8],ymm12[8],ymm15[9],ymm12[9],ymm15[10],ymm12[10],ymm15[11],ymm12[11]
6927 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm17, %zmm26 {%k1}
6928 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r9), %xmm8
6929 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm9[4],ymm2[4],ymm9[5],ymm2[5],ymm9[6],ymm2[6],ymm9[7],ymm2[7],ymm9[12],ymm2[12],ymm9[13],ymm2[13],ymm9[14],ymm2[14],ymm9[15],ymm2[15]
6930 ; AVX512F-SLOW-NEXT: vmovdqa 64(%r8), %xmm9
6931 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm10 = ymm11[4],ymm10[4],ymm11[5],ymm10[5],ymm11[6],ymm10[6],ymm11[7],ymm10[7],ymm11[12],ymm10[12],ymm11[13],ymm10[13],ymm11[14],ymm10[14],ymm11[15],ymm10[15]
6932 ; AVX512F-SLOW-NEXT: vpermd %zmm10, %zmm19, %zmm5
6933 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm18, %zmm5 {%k2}
6934 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6935 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm15[4],ymm12[4],ymm15[5],ymm12[5],ymm15[6],ymm12[6],ymm15[7],ymm12[7],ymm15[12],ymm12[12],ymm15[13],ymm12[13],ymm15[14],ymm12[14],ymm15[15],ymm12[15]
6936 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15]
6937 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm16, %zmm24
6938 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm17, %zmm24 {%k1}
6939 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
6940 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
6941 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm30, %zmm1
6942 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm29, %zmm1 {%k2}
6943 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6944 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
6945 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
6946 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r10), %ymm0
6947 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %ymm1
6948 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
6949 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %ymm4
6950 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %ymm8
6951 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm8[0],ymm4[0],ymm8[1],ymm4[1],ymm8[2],ymm4[2],ymm8[3],ymm4[3],ymm8[8],ymm4[8],ymm8[9],ymm4[9],ymm8[10],ymm4[10],ymm8[11],ymm4[11]
6952 ; AVX512F-SLOW-NEXT: vpermd %zmm9, %zmm19, %zmm28
6953 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm18, %zmm28 {%k2}
6954 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %ymm9
6955 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
6956 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm11[0],ymm9[0],ymm11[1],ymm9[1],ymm11[2],ymm9[2],ymm11[3],ymm9[3],ymm11[8],ymm9[8],ymm11[9],ymm9[9],ymm11[10],ymm9[10],ymm11[11],ymm9[11]
6957 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm16, %zmm23
6958 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %ymm12
6959 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %ymm3
6960 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm3[0],ymm12[0],ymm3[1],ymm12[1],ymm3[2],ymm12[2],ymm3[3],ymm12[3],ymm3[8],ymm12[8],ymm3[9],ymm12[9],ymm3[10],ymm12[10],ymm3[11],ymm12[11]
6961 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm17, %zmm23 {%k1}
6962 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
6963 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm8[4],ymm4[4],ymm8[5],ymm4[5],ymm8[6],ymm4[6],ymm8[7],ymm4[7],ymm8[12],ymm4[12],ymm8[13],ymm4[13],ymm8[14],ymm4[14],ymm8[15],ymm4[15]
6964 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm19, %zmm25
6965 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm18, %zmm25 {%k2}
6966 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm3[4],ymm12[4],ymm3[5],ymm12[5],ymm3[6],ymm12[6],ymm3[7],ymm12[7],ymm3[12],ymm12[12],ymm3[13],ymm12[13],ymm3[14],ymm12[14],ymm3[15],ymm12[15]
6967 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm11[4],ymm9[4],ymm11[5],ymm9[5],ymm11[6],ymm9[6],ymm11[7],ymm9[7],ymm11[12],ymm9[12],ymm11[13],ymm9[13],ymm11[14],ymm9[14],ymm11[15],ymm9[15]
6968 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm16, %zmm21
6969 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm17, %zmm21 {%k1}
6970 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %ymm3
6971 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm4
6972 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm6
6973 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm6[0],ymm4[0],ymm6[1],ymm4[1],ymm6[2],ymm4[2],ymm6[3],ymm4[3],ymm6[8],ymm4[8],ymm6[9],ymm4[9],ymm6[10],ymm4[10],ymm6[11],ymm4[11]
6974 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm19, %zmm27
6975 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %ymm8
6976 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm8[0],ymm3[0],ymm8[1],ymm3[1],ymm8[2],ymm3[2],ymm8[3],ymm3[3],ymm8[8],ymm3[8],ymm8[9],ymm3[9],ymm8[10],ymm3[10],ymm8[11],ymm3[11]
6977 ; AVX512F-SLOW-NEXT: vpermd %zmm9, %zmm18, %zmm27 {%k2}
6978 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm8[4],ymm3[4],ymm8[5],ymm3[5],ymm8[6],ymm3[6],ymm8[7],ymm3[7],ymm8[12],ymm3[12],ymm8[13],ymm3[13],ymm8[14],ymm3[14],ymm8[15],ymm3[15]
6979 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm6[4],ymm4[4],ymm6[5],ymm4[5],ymm6[6],ymm4[6],ymm6[7],ymm4[7],ymm6[12],ymm4[12],ymm6[13],ymm4[13],ymm6[14],ymm4[14],ymm6[15],ymm4[15]
6980 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm6
6981 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm19, %zmm20
6982 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm4
6983 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm18, %zmm20 {%k2}
6984 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm3
6985 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
6986 ; AVX512F-SLOW-NEXT: vpermd %zmm8, %zmm16, %zmm18
6987 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm8
6988 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11]
6989 ; AVX512F-SLOW-NEXT: vpermd %zmm9, %zmm17, %zmm18 {%k1}
6990 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15]
6991 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
6992 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r10), %xmm4
6993 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm16, %zmm16
6994 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %xmm3
6995 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm17, %zmm16 {%k1}
6996 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %xmm6
6997 ; AVX512F-SLOW-NEXT: vpermd %zmm10, %zmm30, %zmm19
6998 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %xmm8
6999 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm29, %zmm19 {%k2}
7000 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
7001 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm30, %zmm10
7002 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
7003 ; AVX512F-SLOW-NEXT: vpermd %zmm9, %zmm29, %zmm10 {%k2}
7004 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
7005 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm0
7006 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
7007 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm13
7008 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm30, %zmm17
7009 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm29, %zmm17 {%k2}
7010 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm13[4],xmm0[4],xmm13[5],xmm0[5],xmm13[6],xmm0[6],xmm13[7],xmm0[7]
7011 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm0, %xmm22
7012 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7013 ; AVX512F-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
7014 ; AVX512F-SLOW-NEXT: # xmm4 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
7015 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm9
7016 ; AVX512F-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm6 # 16-byte Folded Reload
7017 ; AVX512F-SLOW-NEXT: # xmm6 = xmm7[0],mem[0],xmm7[1],mem[1],xmm7[2],mem[2],xmm7[3],mem[3]
7018 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm12
7019 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm30, %zmm8
7020 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm12[4],xmm9[4],xmm12[5],xmm9[5],xmm12[6],xmm9[6],xmm12[7],xmm9[7]
7021 ; AVX512F-SLOW-NEXT: vpermd %zmm4, %zmm29, %zmm8 {%k2}
7022 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <0,u,1,u,1,u,u,u,2,2,3,3,3,3,u,u>
7023 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm29, %zmm7
7024 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <0,0,1,1,1,1,u,u,2,2,3,3,3,3,u,u>
7025 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm30, %zmm7 {%k1}
7026 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rsi), %xmm3
7027 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdi), %xmm2
7028 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
7029 ; AVX512F-SLOW-NEXT: vpermd %zmm6, %zmm29, %zmm6
7030 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rcx), %xmm1
7031 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdx), %xmm0
7032 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
7033 ; AVX512F-SLOW-NEXT: vpermd %zmm14, %zmm30, %zmm6 {%k1}
7034 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
7035 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
7036 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm29, %zmm5
7037 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm30, %zmm5 {%k1}
7038 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
7039 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdi), %xmm2
7040 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7041 ; AVX512F-SLOW-NEXT: vpermd %zmm3, %zmm29, %zmm4
7042 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rcx), %xmm14
7043 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdx), %xmm0
7044 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
7045 ; AVX512F-SLOW-NEXT: vpermd %zmm15, %zmm30, %zmm4 {%k1}
7046 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm14[4],xmm0[5],xmm14[5],xmm0[6],xmm14[6],xmm0[7],xmm14[7]
7047 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7048 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm29, %zmm3
7049 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm30, %zmm3 {%k1}
7050 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %xmm1
7051 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %xmm2
7052 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7053 ; AVX512F-SLOW-NEXT: vpermd %zmm14, %zmm29, %zmm14
7054 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %xmm15
7055 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %xmm0
7056 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
7057 ; AVX512F-SLOW-NEXT: vpermd %zmm11, %zmm30, %zmm14 {%k1}
7058 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
7059 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7060 ; AVX512F-SLOW-NEXT: vpermd %zmm1, %zmm29, %zmm1
7061 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm30, %zmm1 {%k1}
7062 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm0
7063 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
7064 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm12[0],xmm9[0],xmm12[1],xmm9[1],xmm12[2],xmm9[2],xmm12[3],xmm9[3]
7065 ; AVX512F-SLOW-NEXT: vpermd %zmm2, %zmm29, %zmm2
7066 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm30, %zmm2 {%k1}
7067 ; AVX512F-SLOW-NEXT: movb $-86, %al
7068 ; AVX512F-SLOW-NEXT: kmovw %eax, %k1
7069 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7070 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k1}
7071 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7072 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7073 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm0 {%k1}
7074 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7075 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm31 {%k1}
7076 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7077 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm6 {%k1}
7078 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7079 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm5 {%k1}
7080 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7081 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm26 {%k1}
7082 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7083 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm24 {%k1}
7084 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
7085 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, %zmm4 {%k1}
7086 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm19, %zmm3 {%k1}
7087 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm28, %zmm23 {%k1}
7088 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm25, %zmm21 {%k1}
7089 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm10, %zmm14 {%k1}
7090 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm17, %zmm1 {%k1}
7091 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm27, %zmm18 {%k1}
7092 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm20, %zmm16 {%k1}
7093 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm8, %zmm2 {%k1}
7094 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7095 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm2, (%rax)
7096 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
7097 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm18, 128(%rax)
7098 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm1, 320(%rax)
7099 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm14, 256(%rax)
7100 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm21, 448(%rax)
7101 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm23, 384(%rax)
7102 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm3, 576(%rax)
7103 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm4, 512(%rax)
7104 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm24, 704(%rax)
7105 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm26, 640(%rax)
7106 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm5, 832(%rax)
7107 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm6, 768(%rax)
7108 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm31, 960(%rax)
7109 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, 896(%rax)
7110 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm7, 64(%rax)
7111 ; AVX512F-SLOW-NEXT: addq $504, %rsp # imm = 0x1F8
7112 ; AVX512F-SLOW-NEXT: vzeroupper
7113 ; AVX512F-SLOW-NEXT: retq
7115 ; AVX512F-FAST-LABEL: store_i16_stride8_vf64:
7116 ; AVX512F-FAST: # %bb.0:
7117 ; AVX512F-FAST-NEXT: subq $2312, %rsp # imm = 0x908
7118 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7119 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
7120 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %xmm12
7121 ; AVX512F-FAST-NEXT: vmovdqa 64(%r10), %xmm0
7122 ; AVX512F-FAST-NEXT: vmovdqa (%rax), %xmm1
7123 ; AVX512F-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7124 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm12[4],xmm1[5],xmm12[5],xmm1[6],xmm12[6],xmm1[7],xmm12[7]
7125 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
7126 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7127 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm2
7128 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7129 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm1
7130 ; AVX512F-FAST-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
7131 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7132 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
7133 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7134 ; AVX512F-FAST-NEXT: vmovdqa 96(%r10), %ymm1
7135 ; AVX512F-FAST-NEXT: vmovdqa 96(%rax), %ymm2
7136 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
7137 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm3
7138 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7139 ; AVX512F-FAST-NEXT: vmovdqa 96(%r9), %ymm3
7140 ; AVX512F-FAST-NEXT: vmovdqa 96(%r8), %ymm4
7141 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
7142 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm5
7143 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7144 ; AVX512F-FAST-NEXT: vmovdqa 96(%rcx), %ymm5
7145 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdx), %ymm6
7146 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
7147 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm7, %zmm7
7148 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7149 ; AVX512F-FAST-NEXT: vmovdqa 96(%rsi), %ymm7
7150 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdi), %ymm8
7151 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
7152 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm9, %zmm9
7153 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7154 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm2[4],ymm1[4],ymm2[5],ymm1[5],ymm2[6],ymm1[6],ymm2[7],ymm1[7],ymm2[12],ymm1[12],ymm2[13],ymm1[13],ymm2[14],ymm1[14],ymm2[15],ymm1[15]
7155 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
7156 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7157 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
7158 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
7159 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7160 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
7161 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
7162 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7163 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
7164 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
7165 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7166 ; AVX512F-FAST-NEXT: vmovdqa 96(%r10), %xmm1
7167 ; AVX512F-FAST-NEXT: vmovdqa 96(%rax), %xmm2
7168 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7169 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm3, %zmm3, %zmm3
7170 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7171 ; AVX512F-FAST-NEXT: vmovdqa 96(%r9), %xmm3
7172 ; AVX512F-FAST-NEXT: vmovdqa 96(%r8), %xmm4
7173 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7174 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm5
7175 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7176 ; AVX512F-FAST-NEXT: vmovdqa 96(%rcx), %xmm5
7177 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdx), %xmm6
7178 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7179 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7180 ; AVX512F-FAST-NEXT: vmovdqa 96(%rsi), %xmm7
7181 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdi), %xmm8
7182 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
7183 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7184 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7185 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
7186 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7187 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
7188 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
7189 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7190 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
7191 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7192 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
7193 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7194 ; AVX512F-FAST-NEXT: vmovdqa 64(%r10), %ymm2
7195 ; AVX512F-FAST-NEXT: vmovdqa 64(%rax), %ymm3
7196 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7197 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm25
7198 ; AVX512F-FAST-NEXT: vmovdqa 64(%r9), %ymm4
7199 ; AVX512F-FAST-NEXT: vmovdqa 64(%r8), %ymm5
7200 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
7201 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm24
7202 ; AVX512F-FAST-NEXT: vmovdqa 64(%rcx), %ymm6
7203 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdx), %ymm7
7204 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11]
7205 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
7206 ; AVX512F-FAST-NEXT: vmovdqa 64(%rsi), %ymm8
7207 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm4 = ymm5[4],ymm4[4],ymm5[5],ymm4[5],ymm5[6],ymm4[6],ymm5[7],ymm4[7],ymm5[12],ymm4[12],ymm5[13],ymm4[13],ymm5[14],ymm4[14],ymm5[15],ymm4[15]
7208 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdi), %ymm5
7209 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[12],ymm6[12],ymm7[13],ymm6[13],ymm7[14],ymm6[14],ymm7[15],ymm6[15]
7210 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm5[0],ymm8[0],ymm5[1],ymm8[1],ymm5[2],ymm8[2],ymm5[3],ymm8[3],ymm5[8],ymm8[8],ymm5[9],ymm8[9],ymm5[10],ymm8[10],ymm5[11],ymm8[11]
7211 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm5[4],ymm8[4],ymm5[5],ymm8[5],ymm5[6],ymm8[6],ymm5[7],ymm8[7],ymm5[12],ymm8[12],ymm5[13],ymm8[13],ymm5[14],ymm8[14],ymm5[15],ymm8[15]
7212 ; AVX512F-FAST-NEXT: vmovdqa 64(%rax), %xmm8
7213 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3]
7214 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm0[4],xmm8[5],xmm0[5],xmm8[6],xmm0[6],xmm8[7],xmm0[7]
7215 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
7216 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7217 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm7, %zmm1
7218 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7219 ; AVX512F-FAST-NEXT: vmovdqa 64(%r9), %xmm1
7220 ; AVX512F-FAST-NEXT: vmovdqa 64(%r8), %xmm7
7221 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
7222 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
7223 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm30
7224 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm18
7225 ; AVX512F-FAST-NEXT: vmovdqa 64(%rcx), %xmm2
7226 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdx), %xmm4
7227 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
7228 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7229 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
7230 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7231 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm1
7232 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7233 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm31
7234 ; AVX512F-FAST-NEXT: vmovdqa 64(%rsi), %xmm2
7235 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdi), %xmm3
7236 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7237 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7238 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
7239 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7240 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm6, %zmm1
7241 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7242 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm8, %zmm8, %zmm1
7243 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7244 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
7245 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7246 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm7, %zmm7, %zmm0
7247 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7248 ; AVX512F-FAST-NEXT: vmovdqa 32(%r10), %ymm0
7249 ; AVX512F-FAST-NEXT: vmovdqa 32(%rax), %ymm2
7250 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[8],ymm0[8],ymm2[9],ymm0[9],ymm2[10],ymm0[10],ymm2[11],ymm0[11]
7251 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm27
7252 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %ymm3
7253 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %ymm4
7254 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
7255 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm17
7256 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %ymm5
7257 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %ymm6
7258 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[2],ymm5[2],ymm6[3],ymm5[3],ymm6[8],ymm5[8],ymm6[9],ymm5[9],ymm6[10],ymm5[10],ymm6[11],ymm5[11]
7259 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm7, %zmm29
7260 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %ymm7
7261 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %ymm8
7262 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
7263 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm9, %zmm28
7264 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm9 = ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[12],ymm0[12],ymm2[13],ymm0[13],ymm2[14],ymm0[14],ymm2[15],ymm0[15]
7265 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
7266 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm6[4],ymm5[4],ymm6[5],ymm5[5],ymm6[6],ymm5[6],ymm6[7],ymm5[7],ymm6[12],ymm5[12],ymm6[13],ymm5[13],ymm6[14],ymm5[14],ymm6[15],ymm5[15]
7267 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm8[4],ymm7[4],ymm8[5],ymm7[5],ymm8[6],ymm7[6],ymm8[7],ymm7[7],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15]
7268 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm9, %zmm15
7269 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm13
7270 ; AVX512F-FAST-NEXT: vmovdqa 32(%r10), %xmm3
7271 ; AVX512F-FAST-NEXT: vmovdqa 32(%rax), %xmm4
7272 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7273 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
7274 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm21
7275 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm9
7276 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %xmm0
7277 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %xmm2
7278 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
7279 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
7280 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm26
7281 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm4, %zmm4, %zmm23
7282 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm2
7283 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm4
7284 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
7285 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7286 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
7287 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7288 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm3, %zmm3, %zmm22
7289 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm20
7290 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm0
7291 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm2
7292 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
7293 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7294 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
7295 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7296 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %ymm6
7297 ; AVX512F-FAST-NEXT: vmovdqa (%rax), %ymm3
7298 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm6[0],ymm3[1],ymm6[1],ymm3[2],ymm6[2],ymm3[3],ymm6[3],ymm3[8],ymm6[8],ymm3[9],ymm6[9],ymm3[10],ymm6[10],ymm3[11],ymm6[11]
7299 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm7
7300 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm5
7301 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %ymm10
7302 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm10[0],ymm5[0],ymm10[1],ymm5[1],ymm10[2],ymm5[2],ymm10[3],ymm5[3],ymm10[8],ymm5[8],ymm10[9],ymm5[9],ymm10[10],ymm5[10],ymm10[11],ymm5[11]
7303 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm19
7304 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm11
7305 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm14
7306 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm14[0],ymm11[0],ymm14[1],ymm11[1],ymm14[2],ymm11[2],ymm14[3],ymm11[3],ymm14[8],ymm11[8],ymm14[9],ymm11[9],ymm14[10],ymm11[10],ymm14[11],ymm11[11]
7307 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm16
7308 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm1
7309 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm0
7310 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
7311 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm4, %zmm4
7312 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm3[4],ymm6[4],ymm3[5],ymm6[5],ymm3[6],ymm6[6],ymm3[7],ymm6[7],ymm3[12],ymm6[12],ymm3[13],ymm6[13],ymm3[14],ymm6[14],ymm3[15],ymm6[15]
7313 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm6
7314 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm10[4],ymm5[4],ymm10[5],ymm5[5],ymm10[6],ymm5[6],ymm10[7],ymm5[7],ymm10[12],ymm5[12],ymm10[13],ymm5[13],ymm10[14],ymm5[14],ymm10[15],ymm5[15]
7315 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm2
7316 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm14[4],ymm11[4],ymm14[5],ymm11[5],ymm14[6],ymm11[6],ymm14[7],ymm11[7],ymm14[12],ymm11[12],ymm14[13],ymm11[13],ymm14[14],ymm11[14],ymm14[15],ymm11[15]
7317 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15]
7318 ; AVX512F-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7319 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
7320 ; AVX512F-FAST-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
7321 ; AVX512F-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
7322 ; AVX512F-FAST-NEXT: # xmm5 = xmm5[0],mem[0],xmm5[1],mem[1],xmm5[2],mem[2],xmm5[3],mem[3]
7323 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm3, %zmm10
7324 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm8
7325 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm0
7326 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm3
7327 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
7328 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7329 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
7330 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7331 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm14
7332 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm12
7333 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm0
7334 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm1
7335 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
7336 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
7337 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7338 ; AVX512F-FAST-NEXT: movw $-30584, %ax # imm = 0x8888
7339 ; AVX512F-FAST-NEXT: kmovw %eax, %k2
7340 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,1,4,5,4,5,5,7,10,9,14,13,14,13,15,15]
7341 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
7342 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,4,4,4,4,6,5,8,10,12,14,12,14,14,15]
7343 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 {%k2} # 64-byte Folded Reload
7344 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm3, (%rsp) # 64-byte Spill
7345 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 64-byte Folded Reload
7346 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm3 {%k2} # 64-byte Folded Reload
7347 ; AVX512F-FAST-NEXT: vpermd %zmm24, %zmm0, %zmm24
7348 ; AVX512F-FAST-NEXT: vpermd %zmm25, %zmm1, %zmm24 {%k2}
7349 ; AVX512F-FAST-NEXT: vpermd %zmm18, %zmm0, %zmm25
7350 ; AVX512F-FAST-NEXT: vpermd %zmm30, %zmm1, %zmm25 {%k2}
7351 ; AVX512F-FAST-NEXT: vpermd %zmm17, %zmm0, %zmm5
7352 ; AVX512F-FAST-NEXT: vpermd %zmm27, %zmm1, %zmm5 {%k2}
7353 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7354 ; AVX512F-FAST-NEXT: vpermd %zmm13, %zmm0, %zmm27
7355 ; AVX512F-FAST-NEXT: vpermd %zmm15, %zmm1, %zmm27 {%k2}
7356 ; AVX512F-FAST-NEXT: vpermd %zmm19, %zmm0, %zmm5
7357 ; AVX512F-FAST-NEXT: vpermd %zmm7, %zmm1, %zmm5 {%k2}
7358 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7359 ; AVX512F-FAST-NEXT: vpermd %zmm2, %zmm0, %zmm19
7360 ; AVX512F-FAST-NEXT: vpermd %zmm6, %zmm1, %zmm19 {%k2}
7361 ; AVX512F-FAST-NEXT: movw $8738, %ax # imm = 0x2222
7362 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
7363 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,5,1,3,5,7,5,7,14,13,11,11,15,15,15,15]
7364 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
7365 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,2,1,6,5,6,5,12,14,10,11,14,15,14,15]
7366 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm6 {%k1} # 64-byte Folded Reload
7367 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm7 # 64-byte Folded Reload
7368 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm7 {%k1} # 64-byte Folded Reload
7369 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
7370 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm13 {%k1} # 64-byte Folded Reload
7371 ; AVX512F-FAST-NEXT: vpermd %zmm31, %zmm0, %zmm15
7372 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm15 {%k1} # 64-byte Folded Reload
7373 ; AVX512F-FAST-NEXT: vpermd %zmm28, %zmm0, %zmm17
7374 ; AVX512F-FAST-NEXT: vpermd %zmm29, %zmm1, %zmm17 {%k1}
7375 ; AVX512F-FAST-NEXT: vpermd %zmm9, %zmm0, %zmm9
7376 ; AVX512F-FAST-NEXT: vpermd %zmm21, %zmm1, %zmm9 {%k1}
7377 ; AVX512F-FAST-NEXT: vpermd %zmm4, %zmm0, %zmm4
7378 ; AVX512F-FAST-NEXT: vpermd %zmm16, %zmm1, %zmm4 {%k1}
7379 ; AVX512F-FAST-NEXT: vpermd %zmm8, %zmm0, %zmm8
7380 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm1, %zmm8 {%k1}
7381 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,u,0,u,u,u,1,u,10,10,10,10,u,u,11,11>
7382 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 64-byte Folded Reload
7383 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,0,0,0,u,u,1,1,10,10,10,10,u,u,11,11>
7384 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm1 {%k2} # 64-byte Folded Reload
7385 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm16 # 64-byte Folded Reload
7386 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm16 {%k2} # 64-byte Folded Reload
7387 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm21 # 64-byte Folded Reload
7388 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm21 {%k2} # 64-byte Folded Reload
7389 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm28 # 64-byte Folded Reload
7390 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm28 {%k2} # 64-byte Folded Reload
7391 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm29 # 64-byte Folded Reload
7392 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm29 {%k2} # 64-byte Folded Reload
7393 ; AVX512F-FAST-NEXT: vpermd %zmm23, %zmm0, %zmm23
7394 ; AVX512F-FAST-NEXT: vpermd %zmm26, %zmm10, %zmm23 {%k2}
7395 ; AVX512F-FAST-NEXT: vpermd %zmm20, %zmm0, %zmm20
7396 ; AVX512F-FAST-NEXT: vpermd %zmm22, %zmm10, %zmm20 {%k2}
7397 ; AVX512F-FAST-NEXT: vpermd %zmm12, %zmm0, %zmm0
7398 ; AVX512F-FAST-NEXT: vpermd %zmm14, %zmm10, %zmm0 {%k2}
7399 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm10
7400 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,u,1,u,1,u,u,u,10,u,11,u,11,u,u,u>
7401 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm12
7402 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7403 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm10
7404 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,0,1,1,1,1,u,u,10,10,11,11,11,11,u,u>
7405 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm12 {%k1}
7406 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7407 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm10
7408 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7409 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm14
7410 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,u,1,u,1,u,u,u,10,10,11,11,11,11,u,u>
7411 ; AVX512F-FAST-NEXT: vpermd %zmm14, %zmm22, %zmm14
7412 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm14 {%k1}
7413 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7414 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm10
7415 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7416 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm26
7417 ; AVX512F-FAST-NEXT: vpermd %zmm26, %zmm22, %zmm26
7418 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm26 {%k1}
7419 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7420 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm10
7421 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7422 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm30
7423 ; AVX512F-FAST-NEXT: vpermd %zmm30, %zmm22, %zmm30
7424 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm30 {%k1}
7425 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7426 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm10
7427 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7428 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm31
7429 ; AVX512F-FAST-NEXT: vpermd %zmm31, %zmm22, %zmm31
7430 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm31 {%k1}
7431 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
7432 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm10
7433 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7434 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm5
7435 ; AVX512F-FAST-NEXT: vpermd %zmm5, %zmm22, %zmm5
7436 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm5 {%k1}
7437 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
7438 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm10, %zmm10, %zmm10
7439 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7440 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm18
7441 ; AVX512F-FAST-NEXT: vpermd %zmm18, %zmm22, %zmm18
7442 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm11, %zmm18 {%k1}
7443 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7444 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm10
7445 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm22, %zmm10
7446 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7447 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm22
7448 ; AVX512F-FAST-NEXT: vpermd %zmm22, %zmm11, %zmm10 {%k1}
7449 ; AVX512F-FAST-NEXT: movb $-86, %al
7450 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
7451 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, %zmm12 {%k1}
7452 ; AVX512F-FAST-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
7453 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, %zmm6 {%k1}
7454 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm3, %zmm7 {%k1}
7455 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm16, %zmm14 {%k1}
7456 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm21, %zmm26 {%k1}
7457 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm24, %zmm13 {%k1}
7458 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm25, %zmm15 {%k1}
7459 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm28, %zmm30 {%k1}
7460 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm29, %zmm31 {%k1}
7461 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7462 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, %zmm17 {%k1}
7463 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm27, %zmm9 {%k1}
7464 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm23, %zmm5 {%k1}
7465 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm20, %zmm18 {%k1}
7466 ; AVX512F-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
7467 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, %zmm4 {%k1}
7468 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm19, %zmm8 {%k1}
7469 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, %zmm10 {%k1}
7470 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7471 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm10, (%rax)
7472 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm8, 192(%rax)
7473 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm4, 128(%rax)
7474 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm18, 320(%rax)
7475 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm5, 256(%rax)
7476 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm9, 448(%rax)
7477 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm17, 384(%rax)
7478 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm31, 576(%rax)
7479 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm30, 512(%rax)
7480 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm15, 704(%rax)
7481 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm13, 640(%rax)
7482 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm26, 832(%rax)
7483 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm14, 768(%rax)
7484 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm7, 960(%rax)
7485 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, 896(%rax)
7486 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm12, 64(%rax)
7487 ; AVX512F-FAST-NEXT: addq $2312, %rsp # imm = 0x908
7488 ; AVX512F-FAST-NEXT: vzeroupper
7489 ; AVX512F-FAST-NEXT: retq
7491 ; AVX512BW-LABEL: store_i16_stride8_vf64:
7492 ; AVX512BW: # %bb.0:
7493 ; AVX512BW-NEXT: subq $2056, %rsp # imm = 0x808
7494 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7495 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
7496 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
7497 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm27
7498 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm2
7499 ; AVX512BW-NEXT: vmovdqa64 64(%r10), %zmm0
7500 ; AVX512BW-NEXT: vmovdqa64 (%rax), %zmm30
7501 ; AVX512BW-NEXT: vmovdqa64 64(%rax), %zmm28
7502 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,u,u,u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39>
7503 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm5
7504 ; AVX512BW-NEXT: vmovdqa64 %zmm2, %zmm4
7505 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm3, %zmm5
7506 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7507 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,u,u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u>
7508 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm5
7509 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm2, %zmm5
7510 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7511 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <u,u,u,u,u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35>
7512 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm6
7513 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm5, %zmm6
7514 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7515 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u>
7516 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
7517 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm6, %zmm7
7518 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7519 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <u,u,u,u,u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47>
7520 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm8
7521 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm7, %zmm8
7522 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7523 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u>
7524 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm9
7525 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm8, %zmm9
7526 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7527 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,u,u,u,u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43>
7528 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm10
7529 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm9, %zmm10
7530 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7531 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u>
7532 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm11
7533 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm10, %zmm11
7534 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7535 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <u,u,u,u,u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55>
7536 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm12
7537 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm11, %zmm12
7538 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7539 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u>
7540 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
7541 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm12, %zmm13
7542 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7543 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <u,u,u,u,u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51>
7544 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm14
7545 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm13, %zmm14
7546 ; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7547 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <u,u,u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u>
7548 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <u,u,u,u,u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63>
7549 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm16
7550 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm15, %zmm16
7551 ; AVX512BW-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7552 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm31 = <u,u,u,u,u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59>
7553 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm31, %zmm4
7554 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7555 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
7556 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm14, %zmm4
7557 ; AVX512BW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7558 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm3
7559 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7560 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm5
7561 ; AVX512BW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7562 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm7
7563 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7564 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm9
7565 ; AVX512BW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7566 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm11
7567 ; AVX512BW-NEXT: vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7568 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm13
7569 ; AVX512BW-NEXT: vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7570 ; AVX512BW-NEXT: vpermi2w %zmm28, %zmm0, %zmm15
7571 ; AVX512BW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7572 ; AVX512BW-NEXT: vpermt2w %zmm28, %zmm31, %zmm0
7573 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7574 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u>
7575 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm31
7576 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm28, %zmm31
7577 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <u,u,u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u>
7578 ; AVX512BW-NEXT: vpermt2w %zmm27, %zmm0, %zmm1
7579 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7580 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm27
7581 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm1
7582 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm2
7583 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7584 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm6
7585 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7586 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm8
7587 ; AVX512BW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7588 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm10
7589 ; AVX512BW-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7590 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm12
7591 ; AVX512BW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7592 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm14
7593 ; AVX512BW-NEXT: vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7594 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm27, %zmm28
7595 ; AVX512BW-NEXT: vpermt2w %zmm1, %zmm0, %zmm27
7596 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm16
7597 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm0
7598 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u,u,u>
7599 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm20
7600 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm20
7601 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm2
7602 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u,u,u>
7603 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm21
7604 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm21
7605 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm3
7606 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u,u,u>
7607 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm22
7608 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm22
7609 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm4
7610 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u,u,u>
7611 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm23
7612 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm23
7613 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm6
7614 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u,u,u>
7615 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm24
7616 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm24
7617 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm7
7618 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <u,u,16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u,u,u>
7619 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm25
7620 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm29, %zmm25
7621 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <u,u,28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u,u,u>
7622 ; AVX512BW-NEXT: vmovdqa64 %zmm16, %zmm26
7623 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm30, %zmm26
7624 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <u,u,24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u,u,u>
7625 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm16
7626 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm5
7627 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm0
7628 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm2
7629 ; AVX512BW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7630 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm3
7631 ; AVX512BW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7632 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm4
7633 ; AVX512BW-NEXT: vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
7634 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm6
7635 ; AVX512BW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7636 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm7
7637 ; AVX512BW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7638 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm29
7639 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm30
7640 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm1, %zmm5
7641 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm3
7642 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm0
7643 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <4,36,u,u,u,u,u,u,5,37,u,u,u,u,u,u,6,38,u,u,u,u,u,u,7,39,u,u,u,u,u,u>
7644 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm18
7645 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm12, %zmm18
7646 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <0,32,u,u,u,u,u,u,1,33,u,u,u,u,u,u,2,34,u,u,u,u,u,u,3,35,u,u,u,u,u,u>
7647 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm17
7648 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm10, %zmm17
7649 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <12,44,u,u,u,u,u,u,13,45,u,u,u,u,u,u,14,46,u,u,u,u,u,u,15,47,u,u,u,u,u,u>
7650 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm15
7651 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm8, %zmm15
7652 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <8,40,u,u,u,u,u,u,9,41,u,u,u,u,u,u,10,42,u,u,u,u,u,u,11,43,u,u,u,u,u,u>
7653 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm14
7654 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm7, %zmm14
7655 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <20,52,u,u,u,u,u,u,21,53,u,u,u,u,u,u,22,54,u,u,u,u,u,u,23,55,u,u,u,u,u,u>
7656 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm13
7657 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm6, %zmm13
7658 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <16,48,u,u,u,u,u,u,17,49,u,u,u,u,u,u,18,50,u,u,u,u,u,u,19,51,u,u,u,u,u,u>
7659 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm11
7660 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm4, %zmm11
7661 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <28,60,u,u,u,u,u,u,29,61,u,u,u,u,u,u,30,62,u,u,u,u,u,u,31,63,u,u,u,u,u,u>
7662 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm9
7663 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm2, %zmm9
7664 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <24,56,u,u,u,u,u,u,25,57,u,u,u,u,u,u,26,58,u,u,u,u,u,u,27,59,u,u,u,u,u,u>
7665 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm19, %zmm3
7666 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
7667 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm0
7668 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm12
7669 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm10
7670 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm8
7671 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm7
7672 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm6
7673 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm4
7674 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
7675 ; AVX512BW-NEXT: vpermt2w %zmm0, %zmm19, %zmm1
7676 ; AVX512BW-NEXT: movw $-30584, %ax # imm = 0x8888
7677 ; AVX512BW-NEXT: kmovd %eax, %k1
7678 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7679 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7680 ; AVX512BW-NEXT: vmovdqa32 %zmm19, %zmm0 {%k1}
7681 ; AVX512BW-NEXT: movw $8738, %ax # imm = 0x2222
7682 ; AVX512BW-NEXT: kmovd %eax, %k2
7683 ; AVX512BW-NEXT: vmovdqa32 %zmm20, %zmm18 {%k2}
7684 ; AVX512BW-NEXT: movb $-86, %al
7685 ; AVX512BW-NEXT: kmovd %eax, %k3
7686 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm18 {%k3}
7687 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7688 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7689 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
7690 ; AVX512BW-NEXT: vmovdqa32 %zmm21, %zmm17 {%k2}
7691 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm17 {%k3}
7692 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7693 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7694 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
7695 ; AVX512BW-NEXT: vmovdqa32 %zmm22, %zmm15 {%k2}
7696 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm15 {%k3}
7697 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7698 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7699 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
7700 ; AVX512BW-NEXT: vmovdqa32 %zmm23, %zmm14 {%k2}
7701 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm14 {%k3}
7702 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7703 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7704 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
7705 ; AVX512BW-NEXT: vmovdqa32 %zmm24, %zmm13 {%k2}
7706 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm13 {%k3}
7707 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7708 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7709 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
7710 ; AVX512BW-NEXT: vmovdqa32 %zmm25, %zmm11 {%k2}
7711 ; AVX512BW-NEXT: vmovdqa64 %zmm19, %zmm11 {%k3}
7712 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7713 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm31 {%k1}
7714 ; AVX512BW-NEXT: vmovdqa32 %zmm26, %zmm9 {%k2}
7715 ; AVX512BW-NEXT: vmovdqa64 %zmm31, %zmm9 {%k3}
7716 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7717 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
7718 ; AVX512BW-NEXT: vmovdqa32 %zmm19, %zmm0 {%k1}
7719 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm3 {%k2}
7720 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm3 {%k3}
7721 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7722 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7723 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7724 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7725 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm12 {%k2}
7726 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm12 {%k3}
7727 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7728 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7729 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7730 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7731 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm10 {%k2}
7732 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm10 {%k3}
7733 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7734 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7735 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7736 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm16 # 64-byte Reload
7737 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm8 {%k2}
7738 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm8 {%k3}
7739 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7740 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7741 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7742 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7743 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm7 {%k2}
7744 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm7 {%k3}
7745 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7746 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7747 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7748 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7749 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm6 {%k2}
7750 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm6 {%k3}
7751 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7752 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
7753 ; AVX512BW-NEXT: vmovdqa32 %zmm16, %zmm0 {%k1}
7754 ; AVX512BW-NEXT: vmovdqa32 %zmm29, %zmm4 {%k2}
7755 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4 {%k3}
7756 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7757 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm28 {%k1}
7758 ; AVX512BW-NEXT: vmovdqa32 %zmm30, %zmm2 {%k2}
7759 ; AVX512BW-NEXT: vmovdqa64 %zmm28, %zmm2 {%k3}
7760 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7761 ; AVX512BW-NEXT: vmovdqa32 %zmm0, %zmm27 {%k1}
7762 ; AVX512BW-NEXT: vmovdqa32 %zmm5, %zmm1 {%k2}
7763 ; AVX512BW-NEXT: vmovdqa64 %zmm27, %zmm1 {%k3}
7764 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7765 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 896(%rax)
7766 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 960(%rax)
7767 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 768(%rax)
7768 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 832(%rax)
7769 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 640(%rax)
7770 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 704(%rax)
7771 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 512(%rax)
7772 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 576(%rax)
7773 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 384(%rax)
7774 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 448(%rax)
7775 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 256(%rax)
7776 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 320(%rax)
7777 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 128(%rax)
7778 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 192(%rax)
7779 ; AVX512BW-NEXT: vmovdqa64 %zmm17, (%rax)
7780 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%rax)
7781 ; AVX512BW-NEXT: addq $2056, %rsp # imm = 0x808
7782 ; AVX512BW-NEXT: vzeroupper
7783 ; AVX512BW-NEXT: retq
7784 %in.vec0 = load <64 x i16>, ptr %in.vecptr0, align 64
7785 %in.vec1 = load <64 x i16>, ptr %in.vecptr1, align 64
7786 %in.vec2 = load <64 x i16>, ptr %in.vecptr2, align 64
7787 %in.vec3 = load <64 x i16>, ptr %in.vecptr3, align 64
7788 %in.vec4 = load <64 x i16>, ptr %in.vecptr4, align 64
7789 %in.vec5 = load <64 x i16>, ptr %in.vecptr5, align 64
7790 %in.vec6 = load <64 x i16>, ptr %in.vecptr6, align 64
7791 %in.vec7 = load <64 x i16>, ptr %in.vecptr7, align 64
7792 %1 = shufflevector <64 x i16> %in.vec0, <64 x i16> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7793 %2 = shufflevector <64 x i16> %in.vec2, <64 x i16> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7794 %3 = shufflevector <64 x i16> %in.vec4, <64 x i16> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7795 %4 = shufflevector <64 x i16> %in.vec6, <64 x i16> %in.vec7, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7796 %5 = shufflevector <128 x i16> %1, <128 x i16> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
7797 %6 = shufflevector <128 x i16> %3, <128 x i16> %4, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
7798 %7 = shufflevector <256 x i16> %5, <256 x i16> %6, <512 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447, i32 448, i32 449, i32 450, i32 451, i32 452, i32 453, i32 454, i32 455, i32 456, i32 457, i32 458, i32 459, i32 460, i32 461, i32 462, i32 463, i32 464, i32 465, i32 466, i32 467, i32 468, i32 469, i32 470, i32 471, i32 472, i32 473, i32 474, i32 475, i32 476, i32 477, i32 478, i32 479, i32 480, i32 481, i32 482, i32 483, i32 484, i32 485, i32 486, i32 487, i32 488, i32 489, i32 490, i32 491, i32 492, i32 493, i32 494, i32 495, i32 496, i32 497, i32 498, i32 499, i32 500, i32 501, i32 502, i32 503, i32 504, i32 505, i32 506, i32 507, i32 508, i32 509, i32 510, i32 511>
7799 %interleaved.vec = shufflevector <512 x i16> %7, <512 x i16> poison, <512 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 448, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 449, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 450, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 451, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 452, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 453, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 454, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 455, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 456, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 457, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 458, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 459, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 460, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 461, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 462, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 463, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 464, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 465, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 466, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 467, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 468, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 469, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 470, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 471, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 472, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 473, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 474, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 475, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 476, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 477, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 478, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 479, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 480, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 481, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 482, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 483, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 484, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 485, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 486, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 487, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 488, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 489, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 490, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 491, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 492, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 493, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 494, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 495, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 496, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 497, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 498, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 499, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 500, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 501, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 502, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 503, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 504, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 505, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 506, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 507, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 508, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 509, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 510, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447, i32 511>
7800 store <512 x i16> %interleaved.vec, ptr %out.vec, align 64
7803 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
7808 ; AVX512-FAST: {{.*}}
7809 ; AVX512-SLOW: {{.*}}
7810 ; AVX512BW-FAST: {{.*}}
7811 ; AVX512BW-ONLY-FAST: {{.*}}
7812 ; AVX512BW-ONLY-SLOW: {{.*}}
7813 ; AVX512BW-SLOW: {{.*}}
7814 ; AVX512DQ-FAST: {{.*}}
7815 ; AVX512DQ-SLOW: {{.*}}
7816 ; AVX512DQBW-FAST: {{.*}}
7817 ; AVX512DQBW-SLOW: {{.*}}
7818 ; AVX512F-ONLY-FAST: {{.*}}
7819 ; AVX512F-ONLY-SLOW: {{.*}}
7822 ; FALLBACK10: {{.*}}
7823 ; FALLBACK11: {{.*}}
7824 ; FALLBACK12: {{.*}}