1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512F-ONLY,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512F-ONLY,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512DQ-ONLY,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512DQ-ONLY,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512BW-ONLY,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512BW-ONLY,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512DQBW-ONLY,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512DQBW-ONLY,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by LoopVectorizer for interleaved stores.
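; As a rough illustration (not part of the test; the array names are made up), the
; shuffles below are roughly what the LoopVectorizer produces when it vectorizes a
; scalar loop of this shape with an interleave group of four stores:
;
;   for (int i = 0; i < n; i++) {
;     out[4*i+0] = a[i];
;     out[4*i+1] = b[i];
;     out[4*i+2] = c[i];
;     out[4*i+3] = d[i];
;   }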
18 define void @store_i16_stride4_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i16_stride4_vf2:
20 ; SSE: # %bb.0:
21 ; SSE-NEXT: movdqa (%rdi), %xmm0
22 ; SSE-NEXT: movdqa (%rdx), %xmm1
23 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
24 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
25 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
26 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
27 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
28 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,5,7]
29 ; SSE-NEXT: movdqa %xmm0, (%r8)
30 ; SSE-NEXT: retq
31 ;
32 ; AVX1-LABEL: store_i16_stride4_vf2:
33 ; AVX1: # %bb.0:
34 ; AVX1-NEXT: vmovdqa (%rdi), %xmm0
35 ; AVX1-NEXT: vmovdqa (%rdx), %xmm1
36 ; AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
37 ; AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
38 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
39 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,8,9,2,3,10,11,4,5,12,13,6,7,14,15]
40 ; AVX1-NEXT: vmovdqa %xmm0, (%r8)
41 ; AVX1-NEXT: retq
42 ;
43 ; AVX512F-LABEL: store_i16_stride4_vf2:
44 ; AVX512F: # %bb.0:
45 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
46 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm1
47 ; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
48 ; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
49 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
50 ; AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,8,9,2,3,10,11,4,5,12,13,6,7,14,15]
51 ; AVX512F-NEXT: vmovdqa %xmm0, (%r8)
52 ; AVX512F-NEXT: retq
53 ;
54 ; AVX512BW-LABEL: store_i16_stride4_vf2:
55 ; AVX512BW: # %bb.0:
56 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
57 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
58 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
59 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
60 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm2 = [0,2,8,10,1,3,9,11]
61 ; AVX512BW-NEXT: vpermi2w %xmm1, %xmm0, %xmm2
62 ; AVX512BW-NEXT: vmovdqa %xmm2, (%r8)
63 ; AVX512BW-NEXT: retq
64 %in.vec0 = load <2 x i16>, ptr %in.vecptr0, align 64
65 %in.vec1 = load <2 x i16>, ptr %in.vecptr1, align 64
66 %in.vec2 = load <2 x i16>, ptr %in.vecptr2, align 64
67 %in.vec3 = load <2 x i16>, ptr %in.vecptr3, align 64
68 %1 = shufflevector <2 x i16> %in.vec0, <2 x i16> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
69 %2 = shufflevector <2 x i16> %in.vec2, <2 x i16> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
70 %3 = shufflevector <4 x i16> %1, <4 x i16> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
71 %interleaved.vec = shufflevector <8 x i16> %3, <8 x i16> poison, <8 x i32> <i32 0, i32 2, i32 4, i32 6, i32 1, i32 3, i32 5, i32 7>
72 store <8 x i16> %interleaved.vec, ptr %out.vec, align 64
73 ret void
74 }
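; Every function in this file follows the same IR shape seen above: load the four
; source vectors, concatenate them with shufflevector, apply a single interleaving
; mask (0, vf, 2*vf, 3*vf, 1, vf+1, ...), and store one wide vector. The prefixed
; comment lines are the FileCheck assertions for how each subtarget lowers it.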
76 define void @store_i16_stride4_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
77 ; SSE-LABEL: store_i16_stride4_vf4:
78 ; SSE: # %bb.0:
79 ; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
80 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
81 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
82 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
83 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
84 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
85 ; SSE-NEXT: movdqa %xmm0, %xmm2
86 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
87 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
88 ; SSE-NEXT: movdqa %xmm0, 16(%r8)
89 ; SSE-NEXT: movdqa %xmm2, (%r8)
90 ; SSE-NEXT: retq
91 ;
92 ; AVX1-ONLY-LABEL: store_i16_stride4_vf4:
93 ; AVX1-ONLY: # %bb.0:
94 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
95 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
96 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm1[0],xmm0[0]
97 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
98 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
99 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm4[0],xmm3[0]
100 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
101 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
102 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
103 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[3,1,2,3]
104 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,0,3,1,4,5,6,7]
105 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[3,1,2,3]
106 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,0,3,1,4,5,6,7]
107 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
108 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
109 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%r8)
110 ; AVX1-ONLY-NEXT: vzeroupper
111 ; AVX1-ONLY-NEXT: retq
112 ;
113 ; AVX2-SLOW-LABEL: store_i16_stride4_vf4:
114 ; AVX2-SLOW: # %bb.0:
115 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
116 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
117 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
118 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
119 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
120 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
121 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
122 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,22,23,30,31]
123 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
124 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,1,8,9,u,u,u,u,2,3,10,11,20,21,28,29,u,u,u,u,22,23,30,31,u,u,u,u]
125 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6],ymm1[7]
126 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, (%r8)
127 ; AVX2-SLOW-NEXT: vzeroupper
128 ; AVX2-SLOW-NEXT: retq
129 ;
130 ; AVX2-FAST-LABEL: store_i16_stride4_vf4:
131 ; AVX2-FAST: # %bb.0:
132 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
133 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
134 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
135 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
136 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
137 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
138 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
139 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
140 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
141 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,2,3,6,7,10,11,14,15,16,17,20,21,24,25,28,29,18,19,22,23,26,27,30,31]
142 ; AVX2-FAST-NEXT: vmovdqa %ymm0, (%r8)
143 ; AVX2-FAST-NEXT: vzeroupper
144 ; AVX2-FAST-NEXT: retq
145 ;
146 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride4_vf4:
147 ; AVX2-FAST-PERLANE: # %bb.0:
148 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
149 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
150 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
151 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
152 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
153 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
154 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
155 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,22,23,30,31]
156 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
157 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,1,8,9,u,u,u,u,2,3,10,11,20,21,28,29,u,u,u,u,22,23,30,31,u,u,u,u]
158 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6],ymm1[7]
159 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%r8)
160 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
161 ; AVX2-FAST-PERLANE-NEXT: retq
162 ;
163 ; AVX512F-SLOW-LABEL: store_i16_stride4_vf4:
164 ; AVX512F-SLOW: # %bb.0:
165 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
166 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
167 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
168 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
169 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
170 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
171 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
172 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,22,23,30,31]
173 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
174 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,1,8,9,u,u,u,u,2,3,10,11,20,21,28,29,u,u,u,u,22,23,30,31,u,u,u,u]
175 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6],ymm1[7]
176 ; AVX512F-SLOW-NEXT: vmovdqa %ymm0, (%r8)
177 ; AVX512F-SLOW-NEXT: vzeroupper
178 ; AVX512F-SLOW-NEXT: retq
179 ;
180 ; AVX512F-FAST-LABEL: store_i16_stride4_vf4:
181 ; AVX512F-FAST: # %bb.0:
182 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
183 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
184 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
185 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
186 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
187 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
188 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
189 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,4,6,1,3,5,7]
190 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
191 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,2,3,6,7,10,11,14,15,16,17,20,21,24,25,28,29,18,19,22,23,26,27,30,31]
192 ; AVX512F-FAST-NEXT: vmovdqa %ymm0, (%r8)
193 ; AVX512F-FAST-NEXT: vzeroupper
194 ; AVX512F-FAST-NEXT: retq
195 ;
196 ; AVX512BW-LABEL: store_i16_stride4_vf4:
197 ; AVX512BW: # %bb.0:
198 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
199 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
200 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
201 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
202 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
203 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
204 ; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
205 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
206 ; AVX512BW-NEXT: vpermw %ymm0, %ymm1, %ymm0
207 ; AVX512BW-NEXT: vmovdqa %ymm0, (%r8)
208 ; AVX512BW-NEXT: vzeroupper
209 ; AVX512BW-NEXT: retq
210 %in.vec0 = load <4 x i16>, ptr %in.vecptr0, align 64
211 %in.vec1 = load <4 x i16>, ptr %in.vecptr1, align 64
212 %in.vec2 = load <4 x i16>, ptr %in.vecptr2, align 64
213 %in.vec3 = load <4 x i16>, ptr %in.vecptr3, align 64
214 %1 = shufflevector <4 x i16> %in.vec0, <4 x i16> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
215 %2 = shufflevector <4 x i16> %in.vec2, <4 x i16> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
216 %3 = shufflevector <8 x i16> %1, <8 x i16> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
217 %interleaved.vec = shufflevector <16 x i16> %3, <16 x i16> poison, <16 x i32> <i32 0, i32 4, i32 8, i32 12, i32 1, i32 5, i32 9, i32 13, i32 2, i32 6, i32 10, i32 14, i32 3, i32 7, i32 11, i32 15>
218 store <16 x i16> %interleaved.vec, ptr %out.vec, align 64
219 ret void
220 }
222 define void @store_i16_stride4_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
223 ; SSE-LABEL: store_i16_stride4_vf8:
224 ; SSE: # %bb.0:
225 ; SSE-NEXT: movdqa (%rdi), %xmm0
226 ; SSE-NEXT: movdqa (%rsi), %xmm1
227 ; SSE-NEXT: movdqa (%rdx), %xmm2
228 ; SSE-NEXT: movdqa (%rcx), %xmm3
229 ; SSE-NEXT: movdqa %xmm2, %xmm4
230 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
231 ; SSE-NEXT: movdqa %xmm0, %xmm5
232 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
233 ; SSE-NEXT: movdqa %xmm5, %xmm6
234 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
235 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
236 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
237 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
238 ; SSE-NEXT: movdqa %xmm0, %xmm1
239 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
240 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
241 ; SSE-NEXT: movdqa %xmm0, 32(%r8)
242 ; SSE-NEXT: movdqa %xmm1, 48(%r8)
243 ; SSE-NEXT: movdqa %xmm5, 16(%r8)
244 ; SSE-NEXT: movdqa %xmm6, (%r8)
245 ; SSE-NEXT: retq
246 ;
247 ; AVX1-ONLY-LABEL: store_i16_stride4_vf8:
248 ; AVX1-ONLY: # %bb.0:
249 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
250 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm1
251 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm2
252 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
253 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
254 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
255 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm6 = xmm5[2],xmm4[2],xmm5[3],xmm4[3]
256 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
257 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
258 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
259 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
260 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm0[2],xmm2[2],xmm0[3],xmm2[3]
261 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
262 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
263 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%r8)
264 ; AVX1-ONLY-NEXT: vmovaps %ymm4, (%r8)
265 ; AVX1-ONLY-NEXT: vzeroupper
266 ; AVX1-ONLY-NEXT: retq
267 ;
268 ; AVX2-ONLY-LABEL: store_i16_stride4_vf8:
269 ; AVX2-ONLY: # %bb.0:
270 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
271 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm1
272 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
273 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
274 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm2 = ymm1[0,2,0,2]
275 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm3 = [u,u,u,u,0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,4,5,12,13,u,u,u,u,6,7,14,15]
276 ; AVX2-ONLY-NEXT: vpshufb %ymm3, %ymm2, %ymm2
277 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
278 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,4,5,12,13,u,u,u,u,6,7,14,15,u,u,u,u]
279 ; AVX2-ONLY-NEXT: vpshufb %ymm5, %ymm4, %ymm4
280 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4],ymm2[5],ymm4[6],ymm2[7]
281 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
282 ; AVX2-ONLY-NEXT: vpshufb %ymm3, %ymm1, %ymm1
283 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
284 ; AVX2-ONLY-NEXT: vpshufb %ymm5, %ymm0, %ymm0
285 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
286 ; AVX2-ONLY-NEXT: vmovdqa %ymm0, 32(%r8)
287 ; AVX2-ONLY-NEXT: vmovdqa %ymm2, (%r8)
288 ; AVX2-ONLY-NEXT: vzeroupper
289 ; AVX2-ONLY-NEXT: retq
290 ;
291 ; AVX512F-LABEL: store_i16_stride4_vf8:
292 ; AVX512F: # %bb.0:
293 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
294 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm1
295 ; AVX512F-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
296 ; AVX512F-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
297 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm1[1,3,1,3]
298 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm3 = [u,u,u,u,0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,4,5,12,13,u,u,u,u,6,7,14,15]
299 ; AVX512F-NEXT: vpshufb %ymm3, %ymm2, %ymm2
300 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm0[1,3,1,3]
301 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,8,9,u,u,u,u,2,3,10,11,u,u,u,u,4,5,12,13,u,u,u,u,6,7,14,15,u,u,u,u]
302 ; AVX512F-NEXT: vpshufb %ymm5, %ymm4, %ymm4
303 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4],ymm2[5],ymm4[6],ymm2[7]
304 ; AVX512F-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,0,2]
305 ; AVX512F-NEXT: vpshufb %ymm3, %ymm1, %ymm1
306 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,0,2]
307 ; AVX512F-NEXT: vpshufb %ymm5, %ymm0, %ymm0
308 ; AVX512F-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
309 ; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
310 ; AVX512F-NEXT: vmovdqa64 %zmm0, (%r8)
311 ; AVX512F-NEXT: vzeroupper
312 ; AVX512F-NEXT: retq
313 ;
314 ; AVX512BW-LABEL: store_i16_stride4_vf8:
315 ; AVX512BW: # %bb.0:
316 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
317 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
318 ; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
319 ; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
320 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
321 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,8,16,24,1,9,17,25,2,10,18,26,3,11,19,27,4,12,20,28,5,13,21,29,6,14,22,30,7,15,23,31]
322 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
323 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%r8)
324 ; AVX512BW-NEXT: vzeroupper
325 ; AVX512BW-NEXT: retq
326 %in.vec0 = load <8 x i16>, ptr %in.vecptr0, align 64
327 %in.vec1 = load <8 x i16>, ptr %in.vecptr1, align 64
328 %in.vec2 = load <8 x i16>, ptr %in.vecptr2, align 64
329 %in.vec3 = load <8 x i16>, ptr %in.vecptr3, align 64
330 %1 = shufflevector <8 x i16> %in.vec0, <8 x i16> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
331 %2 = shufflevector <8 x i16> %in.vec2, <8 x i16> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
332 %3 = shufflevector <16 x i16> %1, <16 x i16> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
333 %interleaved.vec = shufflevector <32 x i16> %3, <32 x i16> poison, <32 x i32> <i32 0, i32 8, i32 16, i32 24, i32 1, i32 9, i32 17, i32 25, i32 2, i32 10, i32 18, i32 26, i32 3, i32 11, i32 19, i32 27, i32 4, i32 12, i32 20, i32 28, i32 5, i32 13, i32 21, i32 29, i32 6, i32 14, i32 22, i32 30, i32 7, i32 15, i32 23, i32 31>
334 store <32 x i16> %interleaved.vec, ptr %out.vec, align 64
335 ret void
336 }
338 define void @store_i16_stride4_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
339 ; SSE-LABEL: store_i16_stride4_vf16:
340 ; SSE: # %bb.0:
341 ; SSE-NEXT: movdqa (%rdi), %xmm0
342 ; SSE-NEXT: movdqa 16(%rdi), %xmm1
343 ; SSE-NEXT: movdqa (%rsi), %xmm5
344 ; SSE-NEXT: movdqa 16(%rsi), %xmm6
345 ; SSE-NEXT: movdqa (%rdx), %xmm7
346 ; SSE-NEXT: movdqa 16(%rdx), %xmm4
347 ; SSE-NEXT: movdqa (%rcx), %xmm8
348 ; SSE-NEXT: movdqa 16(%rcx), %xmm9
349 ; SSE-NEXT: movdqa %xmm7, %xmm10
350 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
351 ; SSE-NEXT: movdqa %xmm0, %xmm2
352 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
353 ; SSE-NEXT: movdqa %xmm2, %xmm3
354 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm10[2],xmm3[3],xmm10[3]
355 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm10[0],xmm2[1],xmm10[1]
356 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
357 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm5[4],xmm0[5],xmm5[5],xmm0[6],xmm5[6],xmm0[7],xmm5[7]
358 ; SSE-NEXT: movdqa %xmm0, %xmm5
359 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm7[2],xmm5[3],xmm7[3]
360 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
361 ; SSE-NEXT: movdqa %xmm4, %xmm7
362 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1],xmm7[2],xmm9[2],xmm7[3],xmm9[3]
363 ; SSE-NEXT: movdqa %xmm1, %xmm8
364 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
365 ; SSE-NEXT: movdqa %xmm8, %xmm10
366 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm7[2],xmm10[3],xmm7[3]
367 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
368 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm9[4],xmm4[5],xmm9[5],xmm4[6],xmm9[6],xmm4[7],xmm9[7]
369 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
370 ; SSE-NEXT: movdqa %xmm1, %xmm6
371 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm4[2],xmm6[3],xmm4[3]
372 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
373 ; SSE-NEXT: movdqa %xmm1, 96(%r8)
374 ; SSE-NEXT: movdqa %xmm6, 112(%r8)
375 ; SSE-NEXT: movdqa %xmm8, 64(%r8)
376 ; SSE-NEXT: movdqa %xmm10, 80(%r8)
377 ; SSE-NEXT: movdqa %xmm0, 32(%r8)
378 ; SSE-NEXT: movdqa %xmm5, 48(%r8)
379 ; SSE-NEXT: movdqa %xmm2, (%r8)
380 ; SSE-NEXT: movdqa %xmm3, 16(%r8)
381 ; SSE-NEXT: retq
382 ;
383 ; AVX1-ONLY-LABEL: store_i16_stride4_vf16:
384 ; AVX1-ONLY: # %bb.0:
385 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm5
386 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm0
387 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm6
388 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm1
389 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
390 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
391 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
392 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
393 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm7
394 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
395 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm8
396 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm4
397 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
398 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
399 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
400 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
401 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm9[0],ymm2[1],ymm9[2],ymm2[3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
402 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
403 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
404 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
405 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
406 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
407 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
408 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
409 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
410 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2],ymm5[3],ymm6[4],ymm5[5],ymm6[6],ymm5[7]
411 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
412 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
413 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
414 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
415 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
416 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
417 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
418 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm8, %ymm7
419 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2],ymm6[3],ymm7[4],ymm6[5],ymm7[6],ymm6[7]
420 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
421 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
422 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
423 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
424 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
425 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
426 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
427 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
428 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
429 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%r8)
430 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 96(%r8)
431 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 32(%r8)
432 ; AVX1-ONLY-NEXT: vmovaps %ymm2, (%r8)
433 ; AVX1-ONLY-NEXT: vzeroupper
434 ; AVX1-ONLY-NEXT: retq
435 ;
436 ; AVX2-SLOW-LABEL: store_i16_stride4_vf16:
437 ; AVX2-SLOW: # %bb.0:
438 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm5
439 ; AVX2-SLOW-NEXT: vmovdqa 16(%rcx), %xmm0
440 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm6
441 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdx), %xmm1
442 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
443 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
444 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
445 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
446 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm7
447 ; AVX2-SLOW-NEXT: vmovdqa 16(%rsi), %xmm3
448 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm8
449 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm4
450 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
451 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
452 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
453 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
454 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0],ymm2[1],ymm9[2],ymm2[3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
455 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
456 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
457 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
458 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
459 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
460 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
461 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
462 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
463 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2],ymm5[3],ymm6[4],ymm5[5],ymm6[6],ymm5[7]
464 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
465 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
466 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
467 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
468 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
469 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
470 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
471 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
472 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2],ymm6[3],ymm7[4],ymm6[5],ymm7[6],ymm6[7]
473 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
474 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
475 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
476 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
477 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
478 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
479 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
480 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
481 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
482 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 64(%r8)
483 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 96(%r8)
484 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 32(%r8)
485 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, (%r8)
486 ; AVX2-SLOW-NEXT: vzeroupper
487 ; AVX2-SLOW-NEXT: retq
488 ;
489 ; AVX2-FAST-LABEL: store_i16_stride4_vf16:
490 ; AVX2-FAST: # %bb.0:
491 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm2
492 ; AVX2-FAST-NEXT: vmovdqa 16(%rsi), %xmm0
493 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm3
494 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm1
495 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
496 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
497 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
498 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
499 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm5
500 ; AVX2-FAST-NEXT: vmovdqa 16(%rcx), %xmm6
501 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm7
502 ; AVX2-FAST-NEXT: vmovdqa 16(%rdx), %xmm8
503 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
504 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,0,1,1,2,2,3,3]
505 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm10, %ymm9
506 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm9[1],ymm4[2],ymm9[3],ymm4[4],ymm9[5],ymm4[6],ymm9[7]
507 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
508 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
509 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
510 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
511 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
512 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm10, %ymm3
513 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3],ymm2[4],ymm3[5],ymm2[6],ymm3[7]
514 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
515 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm3[0],zero,xmm3[1],zero
516 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
517 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm5, %ymm3
518 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
519 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm10, %ymm5
520 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm5[1],ymm3[2],ymm5[3],ymm3[4],ymm5[5],ymm3[6],ymm5[7]
521 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
522 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
523 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
524 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
525 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
526 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm10, %ymm1
527 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
528 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 64(%r8)
529 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 96(%r8)
530 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 32(%r8)
531 ; AVX2-FAST-NEXT: vmovdqa %ymm4, (%r8)
532 ; AVX2-FAST-NEXT: vzeroupper
533 ; AVX2-FAST-NEXT: retq
534 ;
535 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride4_vf16:
536 ; AVX2-FAST-PERLANE: # %bb.0:
537 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm5
538 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rcx), %xmm0
539 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm6
540 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdx), %xmm1
541 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
542 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
543 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
544 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
545 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm7
546 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rsi), %xmm3
547 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm8
548 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm4
549 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
550 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
551 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
552 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
553 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0],ymm2[1],ymm9[2],ymm2[3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
554 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
555 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
556 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
557 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
558 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
559 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
560 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
561 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
562 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2],ymm5[3],ymm6[4],ymm5[5],ymm6[6],ymm5[7]
563 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
564 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
565 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
566 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
567 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
568 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
569 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
570 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
571 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2],ymm6[3],ymm7[4],ymm6[5],ymm7[6],ymm6[7]
572 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
573 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
574 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
575 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
576 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
577 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
578 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
579 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
580 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
581 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 64(%r8)
582 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 96(%r8)
583 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, 32(%r8)
584 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, (%r8)
585 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
586 ; AVX2-FAST-PERLANE-NEXT: retq
587 ;
588 ; AVX512F-LABEL: store_i16_stride4_vf16:
589 ; AVX512F: # %bb.0:
590 ; AVX512F-NEXT: vmovdqa (%rcx), %xmm0
591 ; AVX512F-NEXT: vmovdqa 16(%rcx), %xmm1
592 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm2
593 ; AVX512F-NEXT: vmovdqa 16(%rdx), %xmm3
594 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
595 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
596 ; AVX512F-NEXT: vinserti32x4 $2, %xmm4, %zmm1, %zmm1
597 ; AVX512F-NEXT: vmovdqa (%rsi), %xmm3
598 ; AVX512F-NEXT: vmovdqa 16(%rsi), %xmm4
599 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm5
600 ; AVX512F-NEXT: vmovdqa 16(%rdi), %xmm6
601 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm6[4],xmm4[4],xmm6[5],xmm4[5],xmm6[6],xmm4[6],xmm6[7],xmm4[7]
602 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
603 ; AVX512F-NEXT: vinserti32x4 $2, %xmm7, %zmm4, %zmm4
604 ; AVX512F-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,16,1,17,2,18,3,19,8,24,9,25,10,26,11,27]
605 ; AVX512F-NEXT: vpermt2d %zmm1, %zmm6, %zmm4
606 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
607 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
608 ; AVX512F-NEXT: vinserti32x4 $2, %xmm1, %zmm0, %zmm0
609 ; AVX512F-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
610 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
611 ; AVX512F-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm1
612 ; AVX512F-NEXT: vpermt2d %zmm0, %zmm6, %zmm1
613 ; AVX512F-NEXT: vmovdqa64 %zmm1, (%r8)
614 ; AVX512F-NEXT: vmovdqa64 %zmm4, 64(%r8)
615 ; AVX512F-NEXT: vzeroupper
616 ; AVX512F-NEXT: retq
617 ;
618 ; AVX512BW-LABEL: store_i16_stride4_vf16:
619 ; AVX512BW: # %bb.0:
620 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
621 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
622 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
623 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
624 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,16,32,48,1,17,33,49,2,18,34,50,3,19,35,51,4,20,36,52,5,21,37,53,6,22,38,54,7,23,39,55]
625 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
626 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [8,24,40,56,9,25,41,57,10,26,42,58,11,27,43,59,12,28,44,60,13,29,45,61,14,30,46,62,15,31,47,63]
627 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
628 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 64(%r8)
629 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%r8)
630 ; AVX512BW-NEXT: vzeroupper
631 ; AVX512BW-NEXT: retq
632 %in.vec0 = load <16 x i16>, ptr %in.vecptr0, align 64
633 %in.vec1 = load <16 x i16>, ptr %in.vecptr1, align 64
634 %in.vec2 = load <16 x i16>, ptr %in.vecptr2, align 64
635 %in.vec3 = load <16 x i16>, ptr %in.vecptr3, align 64
636 %1 = shufflevector <16 x i16> %in.vec0, <16 x i16> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
637 %2 = shufflevector <16 x i16> %in.vec2, <16 x i16> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
638 %3 = shufflevector <32 x i16> %1, <32 x i16> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
639 %interleaved.vec = shufflevector <64 x i16> %3, <64 x i16> poison, <64 x i32> <i32 0, i32 16, i32 32, i32 48, i32 1, i32 17, i32 33, i32 49, i32 2, i32 18, i32 34, i32 50, i32 3, i32 19, i32 35, i32 51, i32 4, i32 20, i32 36, i32 52, i32 5, i32 21, i32 37, i32 53, i32 6, i32 22, i32 38, i32 54, i32 7, i32 23, i32 39, i32 55, i32 8, i32 24, i32 40, i32 56, i32 9, i32 25, i32 41, i32 57, i32 10, i32 26, i32 42, i32 58, i32 11, i32 27, i32 43, i32 59, i32 12, i32 28, i32 44, i32 60, i32 13, i32 29, i32 45, i32 61, i32 14, i32 30, i32 46, i32 62, i32 15, i32 31, i32 47, i32 63>
640 store <64 x i16> %interleaved.vec, ptr %out.vec, align 64
641 ret void
642 }
644 define void @store_i16_stride4_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
645 ; SSE-LABEL: store_i16_stride4_vf32:
646 ; SSE: # %bb.0:
647 ; SSE-NEXT: movdqa (%rdi), %xmm5
648 ; SSE-NEXT: movdqa 16(%rdi), %xmm11
649 ; SSE-NEXT: movdqa 32(%rdi), %xmm4
650 ; SSE-NEXT: movdqa 48(%rdi), %xmm2
651 ; SSE-NEXT: movdqa (%rsi), %xmm0
652 ; SSE-NEXT: movdqa 16(%rsi), %xmm3
653 ; SSE-NEXT: movdqa 32(%rsi), %xmm9
654 ; SSE-NEXT: movdqa (%rdx), %xmm7
655 ; SSE-NEXT: movdqa 16(%rdx), %xmm13
656 ; SSE-NEXT: movdqa 32(%rdx), %xmm10
657 ; SSE-NEXT: movdqa (%rcx), %xmm8
658 ; SSE-NEXT: movdqa 16(%rcx), %xmm14
659 ; SSE-NEXT: movdqa 32(%rcx), %xmm12
660 ; SSE-NEXT: movdqa %xmm7, %xmm15
661 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3]
662 ; SSE-NEXT: movdqa %xmm5, %xmm6
663 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
664 ; SSE-NEXT: movdqa %xmm6, %xmm1
665 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm15[2],xmm1[3],xmm15[3]
666 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
667 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm15[0],xmm6[1],xmm15[1]
668 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
669 ; SSE-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm0[4],xmm5[5],xmm0[5],xmm5[6],xmm0[6],xmm5[7],xmm0[7]
670 ; SSE-NEXT: movdqa %xmm5, %xmm0
671 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm7[2],xmm0[3],xmm7[3]
672 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
673 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1]
674 ; SSE-NEXT: movdqa %xmm13, %xmm15
675 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
676 ; SSE-NEXT: movdqa %xmm11, %xmm7
677 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
678 ; SSE-NEXT: movdqa %xmm7, %xmm0
679 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
680 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
681 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm15[0],xmm7[1],xmm15[1]
682 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm14[4],xmm13[5],xmm14[5],xmm13[6],xmm14[6],xmm13[7],xmm14[7]
683 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm3[4],xmm11[5],xmm3[5],xmm11[6],xmm3[6],xmm11[7],xmm3[7]
684 ; SSE-NEXT: movdqa %xmm11, %xmm8
685 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm13[2],xmm8[3],xmm13[3]
686 ; SSE-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm13[0],xmm11[1],xmm13[1]
687 ; SSE-NEXT: movdqa %xmm10, %xmm15
688 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3]
689 ; SSE-NEXT: movdqa %xmm4, %xmm13
690 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm9[0],xmm13[1],xmm9[1],xmm13[2],xmm9[2],xmm13[3],xmm9[3]
691 ; SSE-NEXT: movdqa %xmm13, %xmm14
692 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm15[2],xmm14[3],xmm15[3]
693 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1]
694 ; SSE-NEXT: movdqa 48(%rdx), %xmm15
695 ; SSE-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm12[4],xmm10[5],xmm12[5],xmm10[6],xmm12[6],xmm10[7],xmm12[7]
696 ; SSE-NEXT: movdqa 48(%rcx), %xmm12
697 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm9[4],xmm4[5],xmm9[5],xmm4[6],xmm9[6],xmm4[7],xmm9[7]
698 ; SSE-NEXT: movdqa %xmm4, %xmm9
699 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm10[2],xmm9[3],xmm10[3]
700 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm10[0],xmm4[1],xmm10[1]
701 ; SSE-NEXT: movdqa %xmm15, %xmm10
702 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm12[0],xmm10[1],xmm12[1],xmm10[2],xmm12[2],xmm10[3],xmm12[3]
703 ; SSE-NEXT: movdqa 48(%rsi), %xmm1
704 ; SSE-NEXT: movdqa %xmm2, %xmm3
705 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
706 ; SSE-NEXT: movdqa %xmm3, %xmm0
707 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm10[2],xmm0[3],xmm10[3]
708 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm10[0],xmm3[1],xmm10[1]
709 ; SSE-NEXT: punpckhwd {{.*#+}} xmm15 = xmm15[4],xmm12[4],xmm15[5],xmm12[5],xmm15[6],xmm12[6],xmm15[7],xmm12[7]
710 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
711 ; SSE-NEXT: movdqa %xmm2, %xmm1
712 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm15[2],xmm1[3],xmm15[3]
713 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm15[0],xmm2[1],xmm15[1]
714 ; SSE-NEXT: movdqa %xmm2, 224(%r8)
715 ; SSE-NEXT: movdqa %xmm1, 240(%r8)
716 ; SSE-NEXT: movdqa %xmm3, 192(%r8)
717 ; SSE-NEXT: movdqa %xmm0, 208(%r8)
718 ; SSE-NEXT: movdqa %xmm4, 160(%r8)
719 ; SSE-NEXT: movdqa %xmm9, 176(%r8)
720 ; SSE-NEXT: movdqa %xmm13, 128(%r8)
721 ; SSE-NEXT: movdqa %xmm14, 144(%r8)
722 ; SSE-NEXT: movdqa %xmm11, 96(%r8)
723 ; SSE-NEXT: movdqa %xmm8, 112(%r8)
724 ; SSE-NEXT: movdqa %xmm7, 64(%r8)
725 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
726 ; SSE-NEXT: movaps %xmm0, 80(%r8)
727 ; SSE-NEXT: movdqa %xmm5, 32(%r8)
728 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
729 ; SSE-NEXT: movaps %xmm0, 48(%r8)
730 ; SSE-NEXT: movdqa %xmm6, (%r8)
731 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
732 ; SSE-NEXT: movaps %xmm0, 16(%r8)
733 ; SSE-NEXT: retq
734 ;
735 ; AVX1-ONLY-LABEL: store_i16_stride4_vf32:
736 ; AVX1-ONLY: # %bb.0:
737 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
738 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm0
739 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm4
740 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm7
741 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm6
742 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm1
743 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm5
744 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm8
745 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
746 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm2[0,0,1,1]
747 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
748 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm9, %ymm2
749 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm9
750 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm10
751 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
752 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
753 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
754 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
755 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm12
756 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm11[0],ymm2[1],ymm11[2],ymm2[3],ymm11[4],ymm2[5],ymm11[6],ymm2[7]
757 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm11
758 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
759 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
760 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
761 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm3
762 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
763 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm6[0],zero,xmm6[1],zero
764 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
765 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm9, %ymm6
766 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2],ymm3[3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
767 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
768 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm6[0,0,1,1]
769 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
770 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm9, %ymm6
771 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
772 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
773 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
774 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
775 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm10
776 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2],ymm6[3],ymm9[4],ymm6[5],ymm9[6],ymm6[7]
777 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm9
778 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
779 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[0,0,1,1]
780 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
781 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm8, %ymm7
782 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
783 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
784 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
785 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm11, %ymm8
786 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2],ymm7[3],ymm8[4],ymm7[5],ymm8[6],ymm7[7]
787 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
788 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm8[0,0,1,1]
789 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
790 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm11, %ymm8
791 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
792 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
793 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
794 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
795 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm12
796 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm11[0],ymm8[1],ymm11[2],ymm8[3],ymm11[4],ymm8[5],ymm11[6],ymm8[7]
797 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm11
798 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
799 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,0,1,1]
800 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
801 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
802 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
803 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm5[0],zero,xmm5[1],zero
804 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
805 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm9, %ymm5
806 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4],ymm4[5],ymm5[6],ymm4[7]
807 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
808 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[0,0,1,1]
809 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
810 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm9, %ymm5
811 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
812 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
813 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
814 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
815 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm9[0],ymm5[1],ymm9[2],ymm5[3],ymm9[4],ymm5[5],ymm9[6],ymm5[7]
816 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
817 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
818 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
819 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
820 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
821 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm1[0],zero,xmm1[1],zero
822 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
823 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm9, %ymm1
824 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
825 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%r8)
826 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 64(%r8)
827 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 160(%r8)
828 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 128(%r8)
829 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 224(%r8)
830 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 192(%r8)
831 ; AVX1-ONLY-NEXT: vmovaps %ymm3, (%r8)
832 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 32(%r8)
833 ; AVX1-ONLY-NEXT: vzeroupper
834 ; AVX1-ONLY-NEXT: retq
835 ;
836 ; AVX2-SLOW-LABEL: store_i16_stride4_vf32:
837 ; AVX2-SLOW: # %bb.0:
838 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm3
839 ; AVX2-SLOW-NEXT: vmovdqa 16(%rcx), %xmm0
840 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm4
841 ; AVX2-SLOW-NEXT: vmovdqa 48(%rcx), %xmm7
842 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm6
843 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdx), %xmm1
844 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm5
845 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdx), %xmm8
846 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
847 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm2[0,0,1,1]
848 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
849 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm9, %ymm2
850 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm9
851 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm10
852 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
853 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
854 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
855 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
856 ; AVX2-SLOW-NEXT: vmovdqa 48(%rsi), %xmm12
857 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0],ymm2[1],ymm11[2],ymm2[3],ymm11[4],ymm2[5],ymm11[6],ymm2[7]
858 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdi), %xmm11
859 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
860 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
861 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
862 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
863 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
864 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm6[0],zero,xmm6[1],zero
865 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
866 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm9, %ymm6
867 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2],ymm3[3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
868 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
869 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm6[0,0,1,1]
870 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
871 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm9, %ymm6
872 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
873 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
874 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
875 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
876 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm10
877 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2],ymm6[3],ymm9[4],ymm6[5],ymm9[6],ymm6[7]
878 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm9
879 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
880 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[0,0,1,1]
881 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
882 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
883 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
884 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
885 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
886 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
887 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2],ymm7[3],ymm8[4],ymm7[5],ymm8[6],ymm7[7]
888 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
889 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm8[0,0,1,1]
890 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
891 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
892 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
893 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
894 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
895 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
896 ; AVX2-SLOW-NEXT: vmovdqa 16(%rsi), %xmm12
897 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm11[0],ymm8[1],ymm11[2],ymm8[3],ymm11[4],ymm8[5],ymm11[6],ymm8[7]
898 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm11
899 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
900 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,0,1,1]
901 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
902 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
903 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
904 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm5[0],zero,xmm5[1],zero
905 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
906 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm9, %ymm5
907 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4],ymm4[5],ymm5[6],ymm4[7]
908 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
909 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[0,0,1,1]
910 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
911 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm9, %ymm5
912 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
913 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
914 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
915 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
916 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0],ymm5[1],ymm9[2],ymm5[3],ymm9[4],ymm5[5],ymm9[6],ymm5[7]
917 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
918 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
919 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
920 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
921 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
922 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm1[0],zero,xmm1[1],zero
923 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
924 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm9, %ymm1
925 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
926 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 96(%r8)
927 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 64(%r8)
928 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, 160(%r8)
929 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, 128(%r8)
930 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 224(%r8)
931 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 192(%r8)
932 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, (%r8)
933 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 32(%r8)
934 ; AVX2-SLOW-NEXT: vzeroupper
935 ; AVX2-SLOW-NEXT: retq
937 ; AVX2-FAST-LABEL: store_i16_stride4_vf32:
938 ; AVX2-FAST: # %bb.0:
939 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm5
940 ; AVX2-FAST-NEXT: vmovdqa 16(%rsi), %xmm0
941 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm4
942 ; AVX2-FAST-NEXT: vmovdqa 48(%rsi), %xmm8
943 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm7
944 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm1
945 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm6
946 ; AVX2-FAST-NEXT: vmovdqa 48(%rdi), %xmm9
947 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
948 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
949 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
950 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
951 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm10
952 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm11
953 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
954 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,0,1,1,2,2,3,3]
955 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm3, %ymm12
956 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm12[1],ymm2[2],ymm12[3],ymm2[4],ymm12[5],ymm2[6],ymm12[7]
957 ; AVX2-FAST-NEXT: vmovdqa 48(%rcx), %xmm12
958 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
959 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
960 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
961 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
962 ; AVX2-FAST-NEXT: vmovdqa 48(%rdx), %xmm13
963 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
964 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm3, %ymm7
965 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2],ymm7[3],ymm5[4],ymm7[5],ymm5[6],ymm7[7]
966 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
967 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm7[0],zero,xmm7[1],zero
968 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
969 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm7, %ymm10, %ymm7
970 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
971 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm3, %ymm10
972 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm10[1],ymm7[2],ymm10[3],ymm7[4],ymm10[5],ymm7[6],ymm10[7]
973 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm10
974 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
975 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm8[0],zero,xmm8[1],zero
976 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
977 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
978 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm11
979 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
980 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm3, %ymm9
981 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0],ymm9[1],ymm8[2],ymm9[3],ymm8[4],ymm9[5],ymm8[6],ymm9[7]
982 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
983 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm9[0],zero,xmm9[1],zero
984 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
985 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm9, %ymm12, %ymm9
986 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
987 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm3, %ymm12
988 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0],ymm12[1],ymm9[2],ymm12[3],ymm9[4],ymm12[5],ymm9[6],ymm12[7]
989 ; AVX2-FAST-NEXT: vmovdqa 16(%rcx), %xmm12
990 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm6[4],xmm4[4],xmm6[5],xmm4[5],xmm6[6],xmm4[6],xmm6[7],xmm4[7]
991 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
992 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
993 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm6, %ymm4
994 ; AVX2-FAST-NEXT: vmovdqa 16(%rdx), %xmm6
995 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
996 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm3, %ymm10
997 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm10[1],ymm4[2],ymm10[3],ymm4[4],ymm10[5],ymm4[6],ymm10[7]
998 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
999 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
1000 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1001 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
1002 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm6[0],xmm12[0],xmm6[1],xmm12[1],xmm6[2],xmm12[2],xmm6[3],xmm12[3]
1003 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm3, %ymm11
1004 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2],ymm11[3],ymm10[4],ymm11[5],ymm10[6],ymm11[7]
1005 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
1006 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
1007 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1008 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
1009 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm6[4],xmm12[4],xmm6[5],xmm12[5],xmm6[6],xmm12[6],xmm6[7],xmm12[7]
1010 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
1011 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
1012 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 96(%r8)
1013 ; AVX2-FAST-NEXT: vmovdqa %ymm10, 64(%r8)
1014 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 160(%r8)
1015 ; AVX2-FAST-NEXT: vmovdqa %ymm9, 128(%r8)
1016 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 224(%r8)
1017 ; AVX2-FAST-NEXT: vmovdqa %ymm7, 192(%r8)
1018 ; AVX2-FAST-NEXT: vmovdqa %ymm5, (%r8)
1019 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 32(%r8)
1020 ; AVX2-FAST-NEXT: vzeroupper
1021 ; AVX2-FAST-NEXT: retq
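; Note (illustrative; not a FileCheck assertion): compared with the AVX2-SLOW
; run above, the AVX2-FAST run folds the vpshufd/vpshufd/vinserti128 expansion
; of each rdx/rcx unpack into a single cross-lane vpermd through the constant
; index vector [0,0,1,1,2,2,3,3], and then blends the two halves together as
; in the slow variant.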
1023 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride4_vf32:
1024 ; AVX2-FAST-PERLANE: # %bb.0:
1025 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm3
1026 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rcx), %xmm0
1027 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm4
1028 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rcx), %xmm7
1029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm6
1030 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdx), %xmm1
1031 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm5
1032 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdx), %xmm8
1033 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
1034 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm2[0,0,1,1]
1035 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1036 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm9, %ymm2
1037 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm9
1038 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm10
1039 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1040 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
1041 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1042 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
1043 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rsi), %xmm12
1044 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0],ymm2[1],ymm11[2],ymm2[3],ymm11[4],ymm2[5],ymm11[6],ymm2[7]
1045 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdi), %xmm11
1046 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
1047 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
1048 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
1049 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
1050 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1051 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm6[0],zero,xmm6[1],zero
1052 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1053 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm9, %ymm6
1054 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2],ymm3[3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
1055 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1056 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm6[0,0,1,1]
1057 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1058 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm9, %ymm6
1059 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1060 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
1061 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1062 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
1063 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm10
1064 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2],ymm6[3],ymm9[4],ymm6[5],ymm9[6],ymm6[7]
1065 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm9
1066 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1067 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[0,0,1,1]
1068 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1069 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
1070 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
1071 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
1072 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1073 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
1074 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2],ymm7[3],ymm8[4],ymm7[5],ymm8[6],ymm7[7]
1075 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1076 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm8[0,0,1,1]
1077 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1078 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
1079 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1080 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
1081 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1082 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
1083 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rsi), %xmm12
1084 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm11[0],ymm8[1],ymm11[2],ymm8[3],ymm11[4],ymm8[5],ymm11[6],ymm8[7]
1085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm11
1086 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
1087 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,0,1,1]
1088 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1089 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
1090 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
1091 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm5[0],zero,xmm5[1],zero
1092 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1093 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm9, %ymm5
1094 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4],ymm4[5],ymm5[6],ymm4[7]
1095 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1096 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[0,0,1,1]
1097 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1098 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm9, %ymm5
1099 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1100 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
1101 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1102 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
1103 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0],ymm5[1],ymm9[2],ymm5[3],ymm9[4],ymm5[5],ymm9[6],ymm5[7]
1104 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
1105 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
1106 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1107 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
1108 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
1109 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm1[0],zero,xmm1[1],zero
1110 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1111 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm9, %ymm1
1112 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
1113 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 96(%r8)
1114 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, 64(%r8)
1115 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 160(%r8)
1116 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm8, 128(%r8)
1117 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, 224(%r8)
1118 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 192(%r8)
1119 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, (%r8)
1120 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, 32(%r8)
1121 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1122 ; AVX2-FAST-PERLANE-NEXT: retq
1124 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride4_vf32:
1125 ; AVX512F-ONLY-SLOW: # %bb.0:
1126 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm0
1127 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rcx), %xmm2
1128 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm9
1129 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rcx), %xmm5
1130 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm1
1131 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdx), %xmm3
1132 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm10
1133 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rdx), %xmm6
1134 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1135 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[0,0,1,1]
1136 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1137 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm7, %ymm4
1138 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
1139 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
1140 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1141 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
1142 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm13
1143 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm2
1144 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rsi), %xmm4
1145 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm11
1146 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rsi), %xmm7
1147 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm3
1148 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdi), %xmm14
1149 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm12
1150 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rdi), %xmm8
1151 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm4[4],xmm14[5],xmm4[5],xmm14[6],xmm4[6],xmm14[7],xmm4[7]
1152 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
1153 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
1154 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
1155 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
1156 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm4[0],zero,xmm4[1],zero
1157 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1158 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm14, %ymm4
1159 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm4, %zmm4
1160 ; AVX512F-ONLY-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
1161 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
1162 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm13, %zmm4 {%k1}
1163 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1164 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
1165 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1166 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
1167 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1168 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[0,0,1,1]
1169 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1170 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
1171 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm9, %zmm9
1172 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1173 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm10[0],zero,xmm10[1],zero
1174 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1175 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm13, %ymm10
1176 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1177 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
1178 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1179 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
1180 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm11, %zmm10
1181 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm9, %zmm10 {%k1}
1182 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1183 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[0,0,1,1]
1184 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1185 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm11, %ymm9
1186 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1187 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
1188 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1189 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
1190 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm5, %zmm5
1191 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1192 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm6[0],zero,xmm6[1],zero
1193 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1194 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm9, %ymm6
1195 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1196 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
1197 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1198 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
1199 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
1200 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm5, %zmm6 {%k1}
1201 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
1202 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm5[0,0,1,1]
1203 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1204 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
1205 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1206 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
1207 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1208 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
1209 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm0
1210 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1211 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm1[0],zero,xmm1[1],zero
1212 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1213 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm5, %ymm1
1214 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
1215 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
1216 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1217 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
1218 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
1219 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
1220 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, (%r8)
1221 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 192(%r8)
1222 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 128(%r8)
1223 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 64(%r8)
1224 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
1225 ; AVX512F-ONLY-SLOW-NEXT: retq
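; Note (illustrative; not a FileCheck assertion): -21846 is 0xAAAA, so the k1
; mask written above covers the odd dword lanes of a zmm register. The masked
; vmovdqa32 therefore merges the rdx/rcx word pairs into the odd dwords while
; the even dwords keep the rdi/rsi word pairs, which is exactly the
; interleaved stride-4 layout being stored.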
1227 ; AVX512F-FAST-LABEL: store_i16_stride4_vf32:
1228 ; AVX512F-FAST: # %bb.0:
1229 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm0
1230 ; AVX512F-FAST-NEXT: vmovdqa 16(%rsi), %xmm1
1231 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm9
1232 ; AVX512F-FAST-NEXT: vmovdqa 48(%rsi), %xmm5
1233 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm2
1234 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
1235 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm10
1236 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdi), %xmm6
1237 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
1238 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm4[0],zero,xmm4[1],zero
1239 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1240 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm4, %ymm7, %ymm4
1241 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
1242 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
1243 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1244 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
1245 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
1246 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm3
1247 ; AVX512F-FAST-NEXT: vmovdqa 16(%rcx), %xmm11
1248 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm12
1249 ; AVX512F-FAST-NEXT: vmovdqa 48(%rcx), %xmm7
1250 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm4
1251 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdx), %xmm13
1252 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm14
1253 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdx), %xmm8
1254 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
1255 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
1256 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm15, %zmm11, %zmm11
1257 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
1258 ; AVX512F-FAST-NEXT: movw $-21846, %ax # imm = 0xAAAA
1259 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
1260 ; AVX512F-FAST-NEXT: vpermd %zmm11, %zmm13, %zmm1 {%k1}
1261 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1262 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm11[0],zero,xmm11[1],zero
1263 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1264 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm11, %ymm15, %ymm11
1265 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1266 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
1267 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1268 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
1269 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm9, %zmm9
1270 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
1271 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
1272 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm10, %zmm11, %zmm10
1273 ; AVX512F-FAST-NEXT: vpermd %zmm10, %zmm13, %zmm9 {%k1}
1274 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1275 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
1276 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1277 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
1278 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1279 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
1280 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1281 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
1282 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm5, %zmm5
1283 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1284 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1285 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm7, %zmm6
1286 ; AVX512F-FAST-NEXT: vpermd %zmm6, %zmm13, %zmm5 {%k1}
1287 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
1288 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
1289 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1290 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
1291 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
1292 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
1293 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1294 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
1295 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm0
1296 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1297 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1298 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
1299 ; AVX512F-FAST-NEXT: vpermd %zmm2, %zmm13, %zmm0 {%k1}
1300 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, (%r8)
1301 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm5, 192(%r8)
1302 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm9, 128(%r8)
1303 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 64(%r8)
1304 ; AVX512F-FAST-NEXT: vzeroupper
1305 ; AVX512F-FAST-NEXT: retq
1307 ; AVX512DQ-SLOW-LABEL: store_i16_stride4_vf32:
1308 ; AVX512DQ-SLOW: # %bb.0:
1309 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm1
1310 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rsi), %xmm0
1311 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm7
1312 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rsi), %xmm5
1313 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm2
1314 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdi), %xmm3
1315 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm10
1316 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rdi), %xmm6
1317 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
1318 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm4[0],zero,xmm4[1],zero
1319 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1320 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm8, %ymm4
1321 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
1322 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
1323 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1324 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
1325 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
1326 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm3
1327 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rcx), %xmm13
1328 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm11
1329 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rcx), %xmm8
1330 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm4
1331 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdx), %xmm14
1332 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm12
1333 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rdx), %xmm9
1334 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
1335 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
1336 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
1337 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
1338 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1339 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
1340 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1341 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
1342 ; AVX512DQ-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
1343 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
1344 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm15, %zmm13, %zmm0 {%k1}
1345 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm10[4],xmm7[4],xmm10[5],xmm7[5],xmm10[6],xmm7[6],xmm10[7],xmm7[7]
1346 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
1347 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1348 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
1349 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3]
1350 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm7[0],zero,xmm7[1],zero
1351 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1352 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm10, %ymm7
1353 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm7, %zmm7
1354 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1355 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm10[0,0,1,1]
1356 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1357 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm13, %ymm10
1358 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1359 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[0,0,1,1]
1360 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1361 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
1362 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm10, %zmm11, %zmm7 {%k1}
1363 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1364 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
1365 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1366 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
1367 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1368 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
1369 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1370 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
1371 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm5, %zmm5
1372 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
1373 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm6[0,0,1,1]
1374 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1375 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm10, %ymm6
1376 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
1377 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm8[0,0,1,1]
1378 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1379 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
1380 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm6, %zmm8, %zmm5 {%k1}
1381 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
1382 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm6[0],zero,xmm6[1],zero
1383 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1384 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm8, %ymm6
1385 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1386 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
1387 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1388 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
1389 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm1
1390 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1391 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[0,0,1,1]
1392 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1393 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
1394 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1395 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
1396 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
1397 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
1398 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm2, %zmm3, %zmm1 {%k1}
1399 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, (%r8)
1400 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 192(%r8)
1401 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 128(%r8)
1402 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 64(%r8)
1403 ; AVX512DQ-SLOW-NEXT: vzeroupper
1404 ; AVX512DQ-SLOW-NEXT: retq
1406 ; AVX512BW-LABEL: store_i16_stride4_vf32:
1407 ; AVX512BW: # %bb.0:
1408 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1409 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm1
1410 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm2
1411 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm3
1412 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [u,u,0,32,u,u,1,33,u,u,2,34,u,u,3,35,u,u,4,36,u,u,5,37,u,u,6,38,u,u,7,39]
1413 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1414 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,32,u,u,1,33,u,u,2,34,u,u,3,35,u,u,4,36,u,u,5,37,u,u,6,38,u,u,7,39,u,u]
1415 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1416 ; AVX512BW-NEXT: movw $-21846, %ax # imm = 0xAAAA
1417 ; AVX512BW-NEXT: kmovd %eax, %k1
1418 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm5 {%k1}
1419 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [u,u,8,40,u,u,9,41,u,u,10,42,u,u,11,43,u,u,12,44,u,u,13,45,u,u,14,46,u,u,15,47]
1420 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1421 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [8,40,u,u,9,41,u,u,10,42,u,u,11,43,u,u,12,44,u,u,13,45,u,u,14,46,u,u,15,47,u,u]
1422 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1423 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm6 {%k1}
1424 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [u,u,16,48,u,u,17,49,u,u,18,50,u,u,19,51,u,u,20,52,u,u,21,53,u,u,22,54,u,u,23,55]
1425 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1426 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [16,48,u,u,17,49,u,u,18,50,u,u,19,51,u,u,20,52,u,u,21,53,u,u,22,54,u,u,23,55,u,u]
1427 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm7
1428 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm7 {%k1}
1429 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [u,u,24,56,u,u,25,57,u,u,26,58,u,u,27,59,u,u,28,60,u,u,29,61,u,u,30,62,u,u,31,63]
1430 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
1431 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [24,56,u,u,25,57,u,u,26,58,u,u,27,59,u,u,28,60,u,u,29,61,u,u,30,62,u,u,31,63,u,u]
1432 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1433 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm2 {%k1}
1434 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%r8)
1435 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 128(%r8)
1436 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%r8)
1437 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%r8)
1438 ; AVX512BW-NEXT: vzeroupper
1439 ; AVX512BW-NEXT: retq
1440 %in.vec0 = load <32 x i16>, ptr %in.vecptr0, align 64
1441 %in.vec1 = load <32 x i16>, ptr %in.vecptr1, align 64
1442 %in.vec2 = load <32 x i16>, ptr %in.vecptr2, align 64
1443 %in.vec3 = load <32 x i16>, ptr %in.vecptr3, align 64
1444 %1 = shufflevector <32 x i16> %in.vec0, <32 x i16> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1445 %2 = shufflevector <32 x i16> %in.vec2, <32 x i16> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1446 %3 = shufflevector <64 x i16> %1, <64 x i16> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
1447 %interleaved.vec = shufflevector <128 x i16> %3, <128 x i16> poison, <128 x i32> <i32 0, i32 32, i32 64, i32 96, i32 1, i32 33, i32 65, i32 97, i32 2, i32 34, i32 66, i32 98, i32 3, i32 35, i32 67, i32 99, i32 4, i32 36, i32 68, i32 100, i32 5, i32 37, i32 69, i32 101, i32 6, i32 38, i32 70, i32 102, i32 7, i32 39, i32 71, i32 103, i32 8, i32 40, i32 72, i32 104, i32 9, i32 41, i32 73, i32 105, i32 10, i32 42, i32 74, i32 106, i32 11, i32 43, i32 75, i32 107, i32 12, i32 44, i32 76, i32 108, i32 13, i32 45, i32 77, i32 109, i32 14, i32 46, i32 78, i32 110, i32 15, i32 47, i32 79, i32 111, i32 16, i32 48, i32 80, i32 112, i32 17, i32 49, i32 81, i32 113, i32 18, i32 50, i32 82, i32 114, i32 19, i32 51, i32 83, i32 115, i32 20, i32 52, i32 84, i32 116, i32 21, i32 53, i32 85, i32 117, i32 22, i32 54, i32 86, i32 118, i32 23, i32 55, i32 87, i32 119, i32 24, i32 56, i32 88, i32 120, i32 25, i32 57, i32 89, i32 121, i32 26, i32 58, i32 90, i32 122, i32 27, i32 59, i32 91, i32 123, i32 28, i32 60, i32 92, i32 124, i32 29, i32 61, i32 93, i32 125, i32 30, i32 62, i32 94, i32 126, i32 31, i32 63, i32 95, i32 127>
1448 store <128 x i16> %interleaved.vec, ptr %out.vec, align 64
1449 ret void
1450 }
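; Illustrative sketch (not part of the generated checks): the shufflevector
; sequence above concatenates the four 32 x i16 inputs and stores them
; element-interleaved, i.e. output word 4*i+0 comes from %in.vec0, 4*i+1 from
; %in.vec1, 4*i+2 from %in.vec2 and 4*i+3 from %in.vec3. A hypothetical scalar
; C equivalent, with a/b/c/d standing in for the four inputs:
;
;   for (int i = 0; i < 32; ++i) {
;     out[4*i + 0] = a[i];   // %in.vec0
;     out[4*i + 1] = b[i];   // %in.vec1
;     out[4*i + 2] = c[i];   // %in.vec2
;     out[4*i + 3] = d[i];   // %in.vec3
;   }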
1452 define void @store_i16_stride4_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %out.vec) nounwind {
1453 ; SSE-LABEL: store_i16_stride4_vf64:
1454 ; SSE: # %bb.0:
1455 ; SSE-NEXT: subq $184, %rsp
1456 ; SSE-NEXT: movdqa (%rdi), %xmm10
1457 ; SSE-NEXT: movdqa 16(%rdi), %xmm11
1458 ; SSE-NEXT: movdqa 32(%rdi), %xmm12
1459 ; SSE-NEXT: movdqa 48(%rdi), %xmm13
1460 ; SSE-NEXT: movdqa (%rsi), %xmm5
1461 ; SSE-NEXT: movdqa 16(%rsi), %xmm2
1462 ; SSE-NEXT: movdqa 32(%rsi), %xmm0
1463 ; SSE-NEXT: movdqa (%rdx), %xmm6
1464 ; SSE-NEXT: movdqa 16(%rdx), %xmm4
1465 ; SSE-NEXT: movdqa 32(%rdx), %xmm1
1466 ; SSE-NEXT: movdqa (%rcx), %xmm7
1467 ; SSE-NEXT: movdqa 16(%rcx), %xmm8
1468 ; SSE-NEXT: movdqa 32(%rcx), %xmm3
1469 ; SSE-NEXT: movdqa %xmm6, %xmm9
1470 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
1471 ; SSE-NEXT: movdqa %xmm10, %xmm14
1472 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm5[0],xmm14[1],xmm5[1],xmm14[2],xmm5[2],xmm14[3],xmm5[3]
1473 ; SSE-NEXT: movdqa %xmm14, %xmm15
1474 ; SSE-NEXT: punpckldq {{.*#+}} xmm15 = xmm15[0],xmm9[0],xmm15[1],xmm9[1]
1475 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1476 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm9[2],xmm14[3],xmm9[3]
1477 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1478 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
1479 ; SSE-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
1480 ; SSE-NEXT: movdqa %xmm10, %xmm5
1481 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
1482 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1483 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm6[2],xmm10[3],xmm6[3]
1484 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1485 ; SSE-NEXT: movdqa %xmm4, %xmm5
1486 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm8[0],xmm5[1],xmm8[1],xmm5[2],xmm8[2],xmm5[3],xmm8[3]
1487 ; SSE-NEXT: movdqa %xmm11, %xmm6
1488 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1489 ; SSE-NEXT: movdqa %xmm6, %xmm7
1490 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
1491 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1492 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1493 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1494 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm8[4],xmm4[5],xmm8[5],xmm4[6],xmm8[6],xmm4[7],xmm8[7]
1495 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm2[4],xmm11[5],xmm2[5],xmm11[6],xmm2[6],xmm11[7],xmm2[7]
1496 ; SSE-NEXT: movdqa %xmm11, %xmm2
1497 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
1498 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1499 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm4[2],xmm11[3],xmm4[3]
1500 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1501 ; SSE-NEXT: movdqa %xmm1, %xmm2
1502 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1503 ; SSE-NEXT: movdqa %xmm12, %xmm4
1504 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
1505 ; SSE-NEXT: movdqa %xmm4, %xmm5
1506 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm2[0],xmm5[1],xmm2[1]
1507 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1508 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm2[2],xmm4[3],xmm2[3]
1509 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1510 ; SSE-NEXT: movdqa 48(%rdx), %xmm2
1511 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
1512 ; SSE-NEXT: movdqa 48(%rcx), %xmm3
1513 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm0[4],xmm12[5],xmm0[5],xmm12[6],xmm0[6],xmm12[7],xmm0[7]
1514 ; SSE-NEXT: movdqa %xmm12, %xmm0
1515 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1516 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
1517 ; SSE-NEXT: punpckhdq {{.*#+}} xmm12 = xmm12[2],xmm1[2],xmm12[3],xmm1[3]
1518 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1519 ; SSE-NEXT: movdqa %xmm2, %xmm0
1520 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
1521 ; SSE-NEXT: movdqa 48(%rsi), %xmm1
1522 ; SSE-NEXT: movdqa %xmm13, %xmm4
1523 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
1524 ; SSE-NEXT: movdqa %xmm4, %xmm5
1525 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
1526 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1527 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm0[2],xmm4[3],xmm0[3]
1528 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1529 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
1530 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
1531 ; SSE-NEXT: movdqa %xmm13, %xmm0
1532 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
1533 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1534 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm2[2],xmm13[3],xmm2[3]
1535 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1536 ; SSE-NEXT: movdqa 64(%rdx), %xmm0
1537 ; SSE-NEXT: movdqa 64(%rcx), %xmm1
1538 ; SSE-NEXT: movdqa %xmm0, %xmm2
1539 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1540 ; SSE-NEXT: movdqa 64(%rdi), %xmm13
1541 ; SSE-NEXT: movdqa 64(%rsi), %xmm3
1542 ; SSE-NEXT: movdqa %xmm13, %xmm14
1543 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm3[0],xmm14[1],xmm3[1],xmm14[2],xmm3[2],xmm14[3],xmm3[3]
1544 ; SSE-NEXT: movdqa %xmm14, %xmm4
1545 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
1546 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1547 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm2[2],xmm14[3],xmm2[3]
1548 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1549 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm3[4],xmm13[5],xmm3[5],xmm13[6],xmm3[6],xmm13[7],xmm3[7]
1550 ; SSE-NEXT: movdqa %xmm13, %xmm1
1551 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1552 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1553 ; SSE-NEXT: punpckhdq {{.*#+}} xmm13 = xmm13[2],xmm0[2],xmm13[3],xmm0[3]
1554 ; SSE-NEXT: movdqa 80(%rdx), %xmm0
1555 ; SSE-NEXT: movdqa 80(%rcx), %xmm1
1556 ; SSE-NEXT: movdqa %xmm0, %xmm2
1557 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1558 ; SSE-NEXT: movdqa 80(%rdi), %xmm11
1559 ; SSE-NEXT: movdqa 80(%rsi), %xmm7
1560 ; SSE-NEXT: movdqa %xmm11, %xmm8
1561 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
1562 ; SSE-NEXT: movdqa %xmm8, %xmm3
1563 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
1564 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1565 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm2[2],xmm8[3],xmm2[3]
1566 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1567 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm7[4],xmm11[5],xmm7[5],xmm11[6],xmm7[6],xmm11[7],xmm7[7]
1568 ; SSE-NEXT: movdqa %xmm11, %xmm15
1569 ; SSE-NEXT: punpckldq {{.*#+}} xmm15 = xmm15[0],xmm0[0],xmm15[1],xmm0[1]
1570 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm0[2],xmm11[3],xmm0[3]
1571 ; SSE-NEXT: movdqa 96(%rdx), %xmm1
1572 ; SSE-NEXT: movdqa 96(%rcx), %xmm6
1573 ; SSE-NEXT: movdqa %xmm1, %xmm0
1574 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
1575 ; SSE-NEXT: movdqa 96(%rdi), %xmm5
1576 ; SSE-NEXT: movdqa 96(%rsi), %xmm4
1577 ; SSE-NEXT: movdqa %xmm5, %xmm9
1578 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
1579 ; SSE-NEXT: movdqa %xmm9, %xmm12
1580 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm0[0],xmm12[1],xmm0[1]
1581 ; SSE-NEXT: punpckhdq {{.*#+}} xmm9 = xmm9[2],xmm0[2],xmm9[3],xmm0[3]
1582 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
1583 ; SSE-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
1584 ; SSE-NEXT: movdqa %xmm5, %xmm10
1585 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm1[0],xmm10[1],xmm1[1]
1586 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm1[2],xmm5[3],xmm1[3]
1587 ; SSE-NEXT: movdqa 112(%rdx), %xmm2
1588 ; SSE-NEXT: movdqa 112(%rcx), %xmm7
1589 ; SSE-NEXT: movdqa %xmm2, %xmm6
1590 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1591 ; SSE-NEXT: movdqa 112(%rdi), %xmm0
1592 ; SSE-NEXT: movdqa 112(%rsi), %xmm4
1593 ; SSE-NEXT: movdqa %xmm0, %xmm1
1594 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1595 ; SSE-NEXT: movdqa %xmm1, %xmm3
1596 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1]
1597 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1598 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
1599 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
1600 ; SSE-NEXT: movdqa %xmm0, %xmm4
1601 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
1602 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm2[2],xmm0[3],xmm2[3]
1603 ; SSE-NEXT: movdqa %xmm0, 496(%r8)
1604 ; SSE-NEXT: movdqa %xmm4, 480(%r8)
1605 ; SSE-NEXT: movdqa %xmm1, 464(%r8)
1606 ; SSE-NEXT: movdqa %xmm3, 448(%r8)
1607 ; SSE-NEXT: movdqa %xmm5, 432(%r8)
1608 ; SSE-NEXT: movdqa %xmm10, 416(%r8)
1609 ; SSE-NEXT: movdqa %xmm9, 400(%r8)
1610 ; SSE-NEXT: movdqa %xmm12, 384(%r8)
1611 ; SSE-NEXT: movdqa %xmm11, 368(%r8)
1612 ; SSE-NEXT: movdqa %xmm15, 352(%r8)
1613 ; SSE-NEXT: movdqa %xmm8, 336(%r8)
1614 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1615 ; SSE-NEXT: movaps %xmm0, 320(%r8)
1616 ; SSE-NEXT: movdqa %xmm13, 304(%r8)
1617 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1618 ; SSE-NEXT: movaps %xmm0, 288(%r8)
1619 ; SSE-NEXT: movdqa %xmm14, 272(%r8)
1620 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1621 ; SSE-NEXT: movaps %xmm0, 256(%r8)
1622 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1623 ; SSE-NEXT: movaps %xmm0, 240(%r8)
1624 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1625 ; SSE-NEXT: movaps %xmm0, 224(%r8)
1626 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1627 ; SSE-NEXT: movaps %xmm0, 208(%r8)
1628 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1629 ; SSE-NEXT: movaps %xmm0, 192(%r8)
1630 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1631 ; SSE-NEXT: movaps %xmm0, 176(%r8)
1632 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1633 ; SSE-NEXT: movaps %xmm0, 160(%r8)
1634 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1635 ; SSE-NEXT: movaps %xmm0, 144(%r8)
1636 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1637 ; SSE-NEXT: movaps %xmm0, 128(%r8)
1638 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1639 ; SSE-NEXT: movaps %xmm0, 112(%r8)
1640 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1641 ; SSE-NEXT: movaps %xmm0, 96(%r8)
1642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1643 ; SSE-NEXT: movaps %xmm0, 80(%r8)
1644 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1645 ; SSE-NEXT: movaps %xmm0, 64(%r8)
1646 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1647 ; SSE-NEXT: movaps %xmm0, 48(%r8)
1648 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1649 ; SSE-NEXT: movaps %xmm0, 32(%r8)
1650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1651 ; SSE-NEXT: movaps %xmm0, 16(%r8)
1652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1653 ; SSE-NEXT: movaps %xmm0, (%r8)
1654 ; SSE-NEXT: addq $184, %rsp
1655 ; SSE-NEXT: retq
1657 ; AVX1-ONLY-LABEL: store_i16_stride4_vf64:
1658 ; AVX1-ONLY: # %bb.0:
1659 ; AVX1-ONLY-NEXT: subq $40, %rsp
1660 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm1
1661 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm9
1662 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm6
1663 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm2
1664 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm10
1665 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm8
1666 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
1667 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
1668 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1669 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
1670 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
1671 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm11
1672 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm4
1673 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1674 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
1675 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1676 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
1677 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
1678 ; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
1679 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1680 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
1681 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1682 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1683 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1684 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
1685 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1686 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
1687 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
1688 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1689 ; AVX1-ONLY-NEXT: vmovdqa 64(%rcx), %xmm3
1690 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdx), %xmm4
1691 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1692 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
1693 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1694 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm2
1695 ; AVX1-ONLY-NEXT: vmovdqa 64(%rsi), %xmm5
1696 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm7
1697 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1698 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm12[0],zero,xmm12[1],zero
1699 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1700 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
1701 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0],ymm2[1],ymm12[2],ymm2[3],ymm12[4],ymm2[5],ymm12[6],ymm2[7]
1702 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1703 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1704 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
1705 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
1706 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
1707 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1708 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
1709 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1710 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
1711 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
1712 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1713 ; AVX1-ONLY-NEXT: vmovdqa 80(%rcx), %xmm5
1714 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdx), %xmm7
1715 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1716 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm4[0,0,1,1]
1717 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1718 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm12, %ymm4
1719 ; AVX1-ONLY-NEXT: vmovdqa 80(%rsi), %xmm13
1720 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm14
1721 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1722 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm12[0],zero,xmm12[1],zero
1723 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1724 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm15, %ymm15
1725 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm12
1726 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm15[0],ymm4[1],ymm15[2],ymm4[3],ymm15[4],ymm4[5],ymm15[6],ymm4[7]
1727 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1728 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm15
1729 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1730 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm5[0,0,1,1]
1731 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1732 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
1733 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
1734 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm7[0],zero,xmm7[1],zero
1735 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1736 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm13, %ymm7
1737 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm7[0],ymm5[1],ymm7[2],ymm5[3],ymm7[4],ymm5[5],ymm7[6],ymm5[7]
1738 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1739 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm7[0,0,1,1]
1740 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1741 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm13, %ymm7
1742 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
1743 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
1744 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1745 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
1746 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm14
1747 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm13[0],ymm7[1],ymm13[2],ymm7[3],ymm13[4],ymm7[5],ymm13[6],ymm7[7]
1748 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1749 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[0,0,1,1]
1750 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1751 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
1752 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
1753 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
1754 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1755 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
1756 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2],ymm9[3],ymm10[4],ymm9[5],ymm10[6],ymm9[7]
1757 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
1758 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[0,0,1,1]
1759 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1760 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
1761 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
1762 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm11[0],zero,xmm11[1],zero
1763 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1764 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm13, %ymm11
1765 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2],ymm10[3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
1766 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
1767 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
1768 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1769 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm6
1770 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
1771 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
1772 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1773 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm11, %ymm8
1774 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
1775 ; AVX1-ONLY-NEXT: vmovdqa 96(%rcx), %xmm11
1776 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdx), %xmm12
1777 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1778 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm8[0,0,1,1]
1779 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1780 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm13, %ymm8
1781 ; AVX1-ONLY-NEXT: vmovdqa 96(%rsi), %xmm13
1782 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm14
1783 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1784 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm15[0],zero,xmm15[1],zero
1785 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
1786 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm0, %ymm0
1787 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm8 = ymm0[0],ymm8[1],ymm0[2],ymm8[3],ymm0[4],ymm8[5],ymm0[6],ymm8[7]
1788 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1789 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[0,0,1,1]
1790 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1791 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm11, %ymm0
1792 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
1793 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
1794 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1795 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
1796 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm11[0],ymm0[1],ymm11[2],ymm0[3],ymm11[4],ymm0[5],ymm11[6],ymm0[7]
1797 ; AVX1-ONLY-NEXT: vmovdqa 112(%rcx), %xmm1
1798 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdx), %xmm13
1799 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
1800 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm12[0,0,1,1]
1801 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1802 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
1803 ; AVX1-ONLY-NEXT: vmovdqa 112(%rsi), %xmm14
1804 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm15
1805 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
1806 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
1807 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1808 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
1809 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0],ymm12[1],ymm0[2],ymm12[3],ymm0[4],ymm12[5],ymm0[6],ymm12[7]
1810 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
1811 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
1812 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1813 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1814 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
1815 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
1816 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1817 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1818 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm2
1819 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
1820 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
1821 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1822 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
1823 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1824 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
1825 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm14
1826 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm15
1827 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
1828 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
1829 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1830 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
1831 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm13[1],ymm0[2],ymm13[3],ymm0[4],ymm13[5],ymm0[6],ymm13[7]
1832 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
1833 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
1834 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1835 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1836 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
1837 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
1838 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1839 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
1840 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
1841 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%r8)
1842 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%r8)
1843 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 480(%r8)
1844 ; AVX1-ONLY-NEXT: vmovaps %ymm12, 448(%r8)
1845 ; AVX1-ONLY-NEXT: vmovaps %ymm11, 416(%r8)
1846 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 384(%r8)
1847 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 224(%r8)
1848 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 192(%r8)
1849 ; AVX1-ONLY-NEXT: vmovaps %ymm9, 160(%r8)
1850 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 128(%r8)
1851 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 352(%r8)
1852 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1853 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%r8)
1854 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1855 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%r8)
1856 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1857 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%r8)
1858 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1859 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%r8)
1860 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1861 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%r8)
1862 ; AVX1-ONLY-NEXT: addq $40, %rsp
1863 ; AVX1-ONLY-NEXT: vzeroupper
1864 ; AVX1-ONLY-NEXT: retq
1866 ; AVX2-SLOW-LABEL: store_i16_stride4_vf64:
1867 ; AVX2-SLOW: # %bb.0:
1868 ; AVX2-SLOW-NEXT: subq $40, %rsp
1869 ; AVX2-SLOW-NEXT: vmovdqa 16(%rcx), %xmm1
1870 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm9
1871 ; AVX2-SLOW-NEXT: vmovdqa 48(%rcx), %xmm6
1872 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdx), %xmm2
1873 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm10
1874 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdx), %xmm8
1875 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
1876 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
1877 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1878 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
1879 ; AVX2-SLOW-NEXT: vmovdqa 16(%rsi), %xmm3
1880 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm11
1881 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm4
1882 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1883 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
1884 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1885 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
1886 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
1887 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
1888 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1889 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
1890 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
1891 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
1892 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1893 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
1894 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1895 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
1896 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
1897 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1898 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %xmm3
1899 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %xmm4
1900 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1901 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
1902 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1903 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
1904 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %xmm5
1905 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm7
1906 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1907 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm12[0],zero,xmm12[1],zero
1908 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1909 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm13, %ymm12
1910 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0],ymm2[1],ymm12[2],ymm2[3],ymm12[4],ymm2[5],ymm12[6],ymm2[7]
1911 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1912 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1913 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
1914 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
1915 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
1916 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1917 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
1918 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1919 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
1920 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
1921 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1922 ; AVX2-SLOW-NEXT: vmovdqa 80(%rcx), %xmm5
1923 ; AVX2-SLOW-NEXT: vmovdqa 80(%rdx), %xmm7
1924 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
1925 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm4[0,0,1,1]
1926 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
1927 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm12, %ymm4
1928 ; AVX2-SLOW-NEXT: vmovdqa 80(%rsi), %xmm13
1929 ; AVX2-SLOW-NEXT: vmovdqa 80(%rdi), %xmm14
1930 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1931 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm12[0],zero,xmm12[1],zero
1932 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1933 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm15, %ymm15
1934 ; AVX2-SLOW-NEXT: vmovdqa 48(%rsi), %xmm12
1935 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0],ymm4[1],ymm15[2],ymm4[3],ymm15[4],ymm4[5],ymm15[6],ymm4[7]
1936 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1937 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm15
1938 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
1939 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm5[0,0,1,1]
1940 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
1941 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
1942 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
1943 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm7[0],zero,xmm7[1],zero
1944 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1945 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm13, %ymm7
1946 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0],ymm5[1],ymm7[2],ymm5[3],ymm7[4],ymm5[5],ymm7[6],ymm5[7]
1947 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
1948 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm7[0,0,1,1]
1949 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
1950 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm13, %ymm7
1951 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
1952 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
1953 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
1954 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
1955 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdi), %xmm14
1956 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm13[0],ymm7[1],ymm13[2],ymm7[3],ymm13[4],ymm7[5],ymm13[6],ymm7[7]
1957 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1958 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[0,0,1,1]
1959 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
1960 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
1961 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
1962 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
1963 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1964 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
1965 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2],ymm9[3],ymm10[4],ymm9[5],ymm10[6],ymm9[7]
1966 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
1967 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[0,0,1,1]
1968 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
1969 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
1970 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
1971 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm11[0],zero,xmm11[1],zero
1972 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
1973 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm13, %ymm11
1974 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2],ymm10[3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
1975 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
1976 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
1977 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1978 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm8, %ymm6
1979 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
1980 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
1981 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1982 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
1983 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
1984 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %xmm11
1985 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %xmm12
1986 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
1987 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm8[0,0,1,1]
1988 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
1989 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm13, %ymm8
1990 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %xmm13
1991 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm14
1992 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
1993 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm15[0],zero,xmm15[1],zero
1994 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
1995 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm0
1996 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm0[0],ymm8[1],ymm0[2],ymm8[3],ymm0[4],ymm8[5],ymm0[6],ymm8[7]
1997 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1998 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[0,0,1,1]
1999 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2000 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm11, %ymm0
2001 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2002 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
2003 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
2004 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
2005 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0],ymm0[1],ymm11[2],ymm0[3],ymm11[4],ymm0[5],ymm11[6],ymm0[7]
2006 ; AVX2-SLOW-NEXT: vmovdqa 112(%rcx), %xmm1
2007 ; AVX2-SLOW-NEXT: vmovdqa 112(%rdx), %xmm13
2008 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
2009 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm12[0,0,1,1]
2010 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
2011 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm14, %ymm12
2012 ; AVX2-SLOW-NEXT: vmovdqa 112(%rsi), %xmm14
2013 ; AVX2-SLOW-NEXT: vmovdqa 112(%rdi), %xmm15
2014 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2015 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
2016 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2017 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2018 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm0[0],ymm12[1],ymm0[2],ymm12[3],ymm0[4],ymm12[5],ymm0[6],ymm12[7]
2019 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
2020 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
2021 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2022 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2023 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2024 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
2025 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2026 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2027 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm2
2028 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2029 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm1
2030 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
2031 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
2032 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2033 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
2034 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm14
2035 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm15
2036 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2037 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
2038 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2039 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2040 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm13[1],ymm0[2],ymm13[3],ymm0[4],ymm13[5],ymm0[6],ymm13[7]
2041 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
2042 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2043 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2044 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2045 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2046 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
2047 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2048 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
2049 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
2050 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 32(%r8)
2051 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, (%r8)
2052 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, 480(%r8)
2053 ; AVX2-SLOW-NEXT: vmovdqa %ymm12, 448(%r8)
2054 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, 416(%r8)
2055 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, 384(%r8)
2056 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 224(%r8)
2057 ; AVX2-SLOW-NEXT: vmovdqa %ymm10, 192(%r8)
2058 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, 160(%r8)
2059 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 128(%r8)
2060 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 352(%r8)
2061 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2062 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%r8)
2063 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2064 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%r8)
2065 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2066 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%r8)
2067 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2068 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%r8)
2069 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2070 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%r8)
2071 ; AVX2-SLOW-NEXT: addq $40, %rsp
2072 ; AVX2-SLOW-NEXT: vzeroupper
2073 ; AVX2-SLOW-NEXT: retq
2075 ; AVX2-FAST-LABEL: store_i16_stride4_vf64:
2076 ; AVX2-FAST: # %bb.0:
2077 ; AVX2-FAST-NEXT: subq $40, %rsp
2078 ; AVX2-FAST-NEXT: vmovdqa 16(%rsi), %xmm1
2079 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm10
2080 ; AVX2-FAST-NEXT: vmovdqa 48(%rsi), %xmm7
2081 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
2082 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
2083 ; AVX2-FAST-NEXT: vmovdqa 48(%rdi), %xmm9
2084 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
2085 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
2086 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2087 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2088 ; AVX2-FAST-NEXT: vmovdqa 16(%rcx), %xmm4
2089 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm12
2090 ; AVX2-FAST-NEXT: vmovdqa 16(%rdx), %xmm5
2091 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
2092 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,1,1,2,2,3,3]
2093 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm2, %ymm6
2094 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2],ymm6[3],ymm0[4],ymm6[5],ymm0[6],ymm6[7]
2095 ; AVX2-FAST-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2096 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2097 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
2098 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2099 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
2100 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2101 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm2, %ymm3
2102 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm3[1],ymm1[2],ymm3[3],ymm1[4],ymm3[5],ymm1[6],ymm3[7]
2103 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2104 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %xmm4
2105 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm5
2106 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2107 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm3[0],zero,xmm3[1],zero
2108 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2109 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
2110 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %xmm6
2111 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %xmm8
2112 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
2113 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm2, %ymm13
2114 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm13[1],ymm3[2],ymm13[3],ymm3[4],ymm13[5],ymm3[6],ymm13[7]
2115 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2116 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
2117 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
2118 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2119 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
2120 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
2121 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm2, %ymm5
2122 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0],ymm5[1],ymm4[2],ymm5[3],ymm4[4],ymm5[5],ymm4[6],ymm5[7]
2123 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2124 ; AVX2-FAST-NEXT: vmovdqa 80(%rsi), %xmm6
2125 ; AVX2-FAST-NEXT: vmovdqa 80(%rdi), %xmm8
2126 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
2127 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm5[0],zero,xmm5[1],zero
2128 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2129 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm13, %ymm5
2130 ; AVX2-FAST-NEXT: vmovdqa 80(%rcx), %xmm14
2131 ; AVX2-FAST-NEXT: vmovdqa 80(%rdx), %xmm15
2132 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2133 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm2, %ymm13
2134 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm13[1],ymm5[2],ymm13[3],ymm5[4],ymm13[5],ymm5[6],ymm13[7]
2135 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2136 ; AVX2-FAST-NEXT: vmovdqa 48(%rcx), %xmm13
2137 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
2138 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm6[0],zero,xmm6[1],zero
2139 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2140 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm6, %ymm8, %ymm6
2141 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm0
2142 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2143 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm2, %ymm8
2144 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0],ymm8[1],ymm6[2],ymm8[3],ymm6[4],ymm8[5],ymm6[6],ymm8[7]
2145 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
2146 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm8[0],zero,xmm8[1],zero
2147 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
2148 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm8, %ymm14, %ymm8
2149 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
2150 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm2, %ymm14
2151 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0],ymm14[1],ymm8[2],ymm14[3],ymm8[4],ymm14[5],ymm8[6],ymm14[7]
2152 ; AVX2-FAST-NEXT: vmovdqa 48(%rdx), %xmm14
2153 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2154 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
2155 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
2156 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
2157 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm12[4],xmm0[5],xmm12[5],xmm0[6],xmm12[6],xmm0[7],xmm12[7]
2158 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2159 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm0[1],ymm10[2],ymm0[3],ymm10[4],ymm0[5],ymm10[6],ymm0[7]
2160 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
2161 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm0[0],zero,xmm0[1],zero
2162 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2163 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm11, %ymm0
2164 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2165 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm2, %ymm11
2166 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm0[0],ymm11[1],ymm0[2],ymm11[3],ymm0[4],ymm11[5],ymm0[6],ymm11[7]
2167 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
2168 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm0[0],zero,xmm0[1],zero
2169 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2170 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm7, %ymm0
2171 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2172 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm2, %ymm7
2173 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm0[0],ymm7[1],ymm0[2],ymm7[3],ymm0[4],ymm7[5],ymm0[6],ymm7[7]
2174 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %xmm0
2175 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm12
2176 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3]
2177 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm9[0],zero,xmm9[1],zero
2178 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2179 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm9, %ymm13, %ymm9
2180 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %xmm13
2181 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %xmm14
2182 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2183 ; AVX2-FAST-NEXT: vpermd %ymm15, %ymm2, %ymm15
2184 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0],ymm15[1],ymm9[2],ymm15[3],ymm9[4],ymm15[5],ymm9[6],ymm15[7]
2185 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm0[4],xmm12[5],xmm0[5],xmm12[6],xmm0[6],xmm12[7],xmm0[7]
2186 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm0[0],zero,xmm0[1],zero
2187 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2188 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm12, %ymm0
2189 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2190 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm2, %ymm12
2191 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm0[0],ymm12[1],ymm0[2],ymm12[3],ymm0[4],ymm12[5],ymm0[6],ymm12[7]
2192 ; AVX2-FAST-NEXT: vmovdqa 112(%rsi), %xmm1
2193 ; AVX2-FAST-NEXT: vmovdqa 112(%rdi), %xmm14
2194 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm1[0],xmm14[1],xmm1[1],xmm14[2],xmm1[2],xmm14[3],xmm1[3]
2195 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm13[0],zero,xmm13[1],zero
2196 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2197 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm13, %ymm15, %ymm13
2198 ; AVX2-FAST-NEXT: vmovdqa 112(%rcx), %xmm15
2199 ; AVX2-FAST-NEXT: vmovdqa 112(%rdx), %xmm0
2200 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2201 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm2, %ymm3
2202 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0],ymm3[1],ymm13[2],ymm3[3],ymm13[4],ymm3[5],ymm13[6],ymm3[7]
2203 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm1[4],xmm14[5],xmm1[5],xmm14[6],xmm1[6],xmm14[7],xmm1[7]
2204 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
2205 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2206 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
2207 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
2208 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2209 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2210 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
2211 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm3
2212 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2213 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm14[0],zero,xmm14[1],zero
2214 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,2,3,3]
2215 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm14, %ymm15, %ymm14
2216 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm15
2217 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm0
2218 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2219 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm2, %ymm5
2220 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0],ymm5[1],ymm14[2],ymm5[3],ymm14[4],ymm5[5],ymm14[6],ymm5[7]
2221 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
2222 ; AVX2-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
2223 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2224 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
2225 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
2226 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2227 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2228 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 32(%r8)
2229 ; AVX2-FAST-NEXT: vmovdqa %ymm5, (%r8)
2230 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 480(%r8)
2231 ; AVX2-FAST-NEXT: vmovdqa %ymm13, 448(%r8)
2232 ; AVX2-FAST-NEXT: vmovdqa %ymm12, 416(%r8)
2233 ; AVX2-FAST-NEXT: vmovdqa %ymm9, 384(%r8)
2234 ; AVX2-FAST-NEXT: vmovdqa %ymm7, 224(%r8)
2235 ; AVX2-FAST-NEXT: vmovdqa %ymm11, 192(%r8)
2236 ; AVX2-FAST-NEXT: vmovdqa %ymm10, 160(%r8)
2237 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 128(%r8)
2238 ; AVX2-FAST-NEXT: vmovdqa %ymm6, 352(%r8)
2239 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2240 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%r8)
2241 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2242 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%r8)
2243 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2244 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%r8)
2245 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2246 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%r8)
2247 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2248 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%r8)
2249 ; AVX2-FAST-NEXT: addq $40, %rsp
2250 ; AVX2-FAST-NEXT: vzeroupper
2251 ; AVX2-FAST-NEXT: retq
2253 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride4_vf64:
2254 ; AVX2-FAST-PERLANE: # %bb.0:
2255 ; AVX2-FAST-PERLANE-NEXT: subq $40, %rsp
2256 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rcx), %xmm1
2257 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm9
2258 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rcx), %xmm6
2259 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdx), %xmm2
2260 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm10
2261 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdx), %xmm8
2262 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2263 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
2264 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2265 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2266 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rsi), %xmm3
2267 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm11
2268 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm4
2269 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
2270 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero
2271 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2272 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
2273 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
2274 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2275 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2276 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2277 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2278 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2279 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2280 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
2281 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2282 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
2283 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
2284 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2285 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %xmm3
2286 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %xmm4
2287 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2288 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
2289 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2290 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
2291 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %xmm5
2292 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm7
2293 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
2294 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm12[0],zero,xmm12[1],zero
2295 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
2296 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm12, %ymm13, %ymm12
2297 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0],ymm2[1],ymm12[2],ymm2[3],ymm12[4],ymm2[5],ymm12[6],ymm2[7]
2298 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2299 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
2300 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
2301 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2302 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
2303 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
2304 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
2305 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2306 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
2307 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
2308 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2309 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rcx), %xmm5
2310 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rdx), %xmm7
2311 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
2312 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm12 = xmm4[0,0,1,1]
2313 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2314 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm12, %ymm4
2315 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rsi), %xmm13
2316 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rdi), %xmm14
2317 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2318 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm12[0],zero,xmm12[1],zero
2319 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
2320 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm12, %ymm15, %ymm15
2321 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rsi), %xmm12
2322 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0],ymm4[1],ymm15[2],ymm4[3],ymm15[4],ymm4[5],ymm15[6],ymm4[7]
2323 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2324 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm15
2325 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
2326 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm5[0,0,1,1]
2327 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2328 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm7, %ymm5
2329 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2330 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm7[0],zero,xmm7[1],zero
2331 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
2332 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm7, %ymm13, %ymm7
2333 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0],ymm5[1],ymm7[2],ymm5[3],ymm7[4],ymm5[5],ymm7[6],ymm5[7]
2334 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2335 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm7[0,0,1,1]
2336 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
2337 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm7, %ymm13, %ymm7
2338 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
2339 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
2340 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2341 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
2342 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdi), %xmm14
2343 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm13[0],ymm7[1],ymm13[2],ymm7[3],ymm13[4],ymm7[5],ymm13[6],ymm7[7]
2344 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
2345 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[0,0,1,1]
2346 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2347 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
2348 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
2349 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
2350 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
2351 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
2352 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2],ymm9[3],ymm10[4],ymm9[5],ymm10[6],ymm9[7]
2353 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
2354 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[0,0,1,1]
2355 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
2356 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
2357 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
2358 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm11[0],zero,xmm11[1],zero
2359 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
2360 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm11, %ymm13, %ymm11
2361 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2],ymm10[3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
2362 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
2363 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
2364 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2365 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm8, %ymm6
2366 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
2367 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm8[0],zero,xmm8[1],zero
2368 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
2369 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
2370 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
2371 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %xmm11
2372 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %xmm12
2373 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
2374 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm8[0,0,1,1]
2375 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
2376 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm8, %ymm13, %ymm8
2377 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %xmm13
2378 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm14
2379 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2380 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm15[0],zero,xmm15[1],zero
2381 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2382 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm0
2383 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm0[0],ymm8[1],ymm0[2],ymm8[3],ymm0[4],ymm8[5],ymm0[6],ymm8[7]
2384 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
2385 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[0,0,1,1]
2386 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2387 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm11, %ymm0
2388 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2389 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
2390 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
2391 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm11, %ymm12, %ymm11
2392 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0],ymm0[1],ymm11[2],ymm0[3],ymm11[4],ymm0[5],ymm11[6],ymm0[7]
2393 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rcx), %xmm1
2394 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rdx), %xmm13
2395 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
2396 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm14 = xmm12[0,0,1,1]
2397 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
2398 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm12, %ymm14, %ymm12
2399 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rsi), %xmm14
2400 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rdi), %xmm15
2401 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2402 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
2403 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2404 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2405 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm0[0],ymm12[1],ymm0[2],ymm12[3],ymm0[4],ymm12[5],ymm0[6],ymm12[7]
2406 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
2407 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
2408 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2409 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2410 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2411 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
2412 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2413 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2414 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm2
2415 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2416 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
2417 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
2418 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
2419 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2420 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm13, %ymm14, %ymm13
2421 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm14
2422 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm15
2423 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2424 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
2425 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2426 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2427 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm13[1],ymm0[2],ymm13[3],ymm0[4],ymm13[5],ymm0[6],ymm13[7]
2428 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
2429 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2430 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2431 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2432 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2433 ; AVX2-FAST-PERLANE-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
2434 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2435 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
2436 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
2437 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 32(%r8)
2438 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%r8)
2439 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 480(%r8)
2440 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, 448(%r8)
2441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, 416(%r8)
2442 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm8, 384(%r8)
2443 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 224(%r8)
2444 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, 192(%r8)
2445 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm9, 160(%r8)
2446 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, 128(%r8)
2447 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, 352(%r8)
2448 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2449 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%r8)
2450 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2451 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%r8)
2452 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2453 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%r8)
2454 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2455 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%r8)
2456 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2457 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%r8)
2458 ; AVX2-FAST-PERLANE-NEXT: addq $40, %rsp
2459 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
2460 ; AVX2-FAST-PERLANE-NEXT: retq
2461 ;
2462 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride4_vf64:
2463 ; AVX512F-ONLY-SLOW: # %bb.0:
2464 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %xmm20
2465 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rcx), %xmm2
2466 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm10
2467 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rcx), %xmm5
2468 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm1
2469 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdx), %xmm3
2470 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm11
2471 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rdx), %xmm6
2472 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2473 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[0,0,1,1]
2474 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2475 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm7, %ymm4
2476 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2477 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
2478 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2479 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
2480 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm9
2481 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm2
2482 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rsi), %xmm3
2483 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm12
2484 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rsi), %xmm7
2485 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm4
2486 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdi), %xmm14
2487 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm13
2488 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 48(%rdi), %xmm8
2489 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm3[4],xmm14[5],xmm3[5],xmm14[6],xmm3[6],xmm14[7],xmm3[7]
2490 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
2491 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2492 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2493 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm14[0],xmm3[0],xmm14[1],xmm3[1],xmm14[2],xmm3[2],xmm14[3],xmm3[3]
2494 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm3[0],zero,xmm3[1],zero
2495 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2496 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm14, %ymm3
2497 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm3, %zmm17
2498 ; AVX512F-ONLY-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
2499 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
2500 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm9, %zmm17 {%k1}
2501 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rcx), %xmm9
2502 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdx), %xmm14
2503 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm9[4],xmm14[5],xmm9[5],xmm14[6],xmm9[6],xmm14[7],xmm9[7]
2504 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
2505 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2506 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2507 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm14[0],xmm9[0],xmm14[1],xmm9[1],xmm14[2],xmm9[2],xmm14[3],xmm9[3]
2508 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm9[0,0,1,1]
2509 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2510 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm14, %ymm9
2511 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm9, %zmm14
2512 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rsi), %xmm9
2513 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdi), %xmm15
2514 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm9[4],xmm15[5],xmm9[5],xmm15[6],xmm9[6],xmm15[7],xmm9[7]
2515 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm0[0],zero,xmm0[1],zero
2516 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2517 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm0, %ymm16, %ymm0
2518 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
2519 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm9[0],zero,xmm9[1],zero
2520 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2521 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm15, %ymm9
2522 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm9, %zmm18
2523 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm14, %zmm18 {%k1}
2524 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 112(%rcx), %xmm0
2525 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 112(%rdx), %xmm14
2526 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
2527 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
2528 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2529 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2530 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
2531 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm0[0,0,1,1]
2532 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2533 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm14, %ymm0
2534 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm0
2535 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 112(%rsi), %xmm14
2536 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 112(%rdi), %xmm15
2537 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2538 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm3[0],zero,xmm3[1],zero
2539 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2540 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm16, %ymm3
2541 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2542 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm14[0],zero,xmm14[1],zero
2543 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,2,3,3]
2544 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm15, %ymm14
2545 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm14, %zmm19
2546 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm19 {%k1}
2547 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %xmm0
2548 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
2549 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
2550 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
2551 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2552 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2553 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2554 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
2555 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2556 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2557 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm0
2558 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %xmm3
2559 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %xmm15
2560 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm15[4],xmm3[4],xmm15[5],xmm3[5],xmm15[6],xmm3[6],xmm15[7],xmm3[7]
2561 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm9[0],zero,xmm9[1],zero
2562 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2563 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm9, %ymm16, %ymm9
2564 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm15[0],xmm3[0],xmm15[1],xmm3[1],xmm15[2],xmm3[2],xmm15[3],xmm3[3]
2565 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm3[0],zero,xmm3[1],zero
2566 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2567 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm15, %ymm3
2568 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm3, %zmm15
2569 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm15 {%k1}
2570 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 80(%rcx), %xmm0
2571 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 80(%rdx), %xmm3
2572 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
2573 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm9[0,0,1,1]
2574 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2575 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm9, %ymm16, %ymm9
2576 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2577 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
2578 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2579 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2580 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
2581 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 80(%rsi), %xmm3
2582 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 80(%rdi), %xmm9
2583 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm9[4],xmm3[4],xmm9[5],xmm3[5],xmm9[6],xmm3[6],xmm9[7],xmm3[7]
2584 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm14[0],zero,xmm14[1],zero
2585 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,2,3,3]
2586 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm14, %ymm16, %ymm14
2587 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm9[0],xmm3[0],xmm9[1],xmm3[1],xmm9[2],xmm3[2],xmm9[3],xmm3[3]
2588 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm3[0],zero,xmm3[1],zero
2589 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2590 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm9, %ymm3
2591 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm3, %zmm16
2592 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm16 {%k1}
2593 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2594 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
2595 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2596 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2597 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
2598 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm3[0,0,1,1]
2599 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2600 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm9, %ymm3
2601 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
2602 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
2603 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm3[0],zero,xmm3[1],zero
2604 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2605 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm9, %ymm3
2606 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
2607 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
2608 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
2609 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
2610 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm9, %zmm3
2611 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm3 {%k1}
2612 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
2613 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
2614 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2615 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm9, %ymm0
2616 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
2617 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
2618 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2619 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2620 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm5, %zmm0
2621 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
2622 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
2623 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2624 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2625 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
2626 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
2627 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2628 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2629 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
2630 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm5 {%k1}
2631 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm20, %xmm7
2632 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
2633 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
2634 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2635 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2636 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
2637 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[0,0,1,1]
2638 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2639 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm6, %ymm1
2640 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
2641 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
2642 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm1[0],zero,xmm1[1],zero
2643 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2644 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm6, %ymm1
2645 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
2646 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm2[0],zero,xmm2[1],zero
2647 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2648 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
2649 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
2650 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
2651 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, (%r8)
2652 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 192(%r8)
2653 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 128(%r8)
2654 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 320(%r8)
2655 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 256(%r8)
2656 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 448(%r8)
2657 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 384(%r8)
2658 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm17, 64(%r8)
2659 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
2660 ; AVX512F-ONLY-SLOW-NEXT: retq
2661 ;
2662 ; AVX512F-FAST-LABEL: store_i16_stride4_vf64:
2663 ; AVX512F-FAST: # %bb.0:
2664 ; AVX512F-FAST-NEXT: vmovdqa64 (%rsi), %xmm19
2665 ; AVX512F-FAST-NEXT: vmovdqa 16(%rsi), %xmm0
2666 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm11
2667 ; AVX512F-FAST-NEXT: vmovdqa 48(%rsi), %xmm6
2668 ; AVX512F-FAST-NEXT: vmovdqa64 (%rdi), %xmm20
2669 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
2670 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm12
2671 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdi), %xmm7
2672 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
2673 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
2674 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2675 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
2676 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2677 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
2678 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2679 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2680 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm18
2681 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm3
2682 ; AVX512F-FAST-NEXT: vmovdqa 16(%rcx), %xmm5
2683 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm13
2684 ; AVX512F-FAST-NEXT: vmovdqa 48(%rcx), %xmm9
2685 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm4
2686 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdx), %xmm8
2687 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm14
2688 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdx), %xmm10
2689 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
2690 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
2691 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm15, %zmm5, %zmm8
2692 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
2693 ; AVX512F-FAST-NEXT: movw $-21846, %ax # imm = 0xAAAA
2694 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
2695 ; AVX512F-FAST-NEXT: vpermd %zmm8, %zmm5, %zmm18 {%k1}
2696 ; AVX512F-FAST-NEXT: vmovdqa 96(%rsi), %xmm8
2697 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdi), %xmm15
2698 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm8[4],xmm15[5],xmm8[5],xmm15[6],xmm8[6],xmm15[7],xmm8[7]
2699 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm0[0],zero,xmm0[1],zero
2700 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2701 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm0, %ymm16, %ymm0
2702 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3]
2703 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm8[0],zero,xmm8[1],zero
2704 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
2705 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm8, %ymm15, %ymm8
2706 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm8
2707 ; AVX512F-FAST-NEXT: vmovdqa 96(%rcx), %xmm0
2708 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdx), %xmm15
2709 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
2710 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
2711 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm0, %zmm0
2712 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm5, %zmm8 {%k1}
2713 ; AVX512F-FAST-NEXT: vmovdqa 112(%rsi), %xmm0
2714 ; AVX512F-FAST-NEXT: vmovdqa 112(%rdi), %xmm1
2715 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2716 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
2717 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2718 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2719 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2720 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
2721 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2722 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2723 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm15
2724 ; AVX512F-FAST-NEXT: vmovdqa 112(%rcx), %xmm0
2725 ; AVX512F-FAST-NEXT: vmovdqa 112(%rdx), %xmm1
2726 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2727 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2728 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
2729 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm5, %zmm15 {%k1}
2730 ; AVX512F-FAST-NEXT: vmovdqa 64(%rsi), %xmm0
2731 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdi), %xmm1
2732 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2733 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm2[0],zero,xmm2[1],zero
2734 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2735 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm2, %ymm16, %ymm2
2736 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2737 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
2738 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2739 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2740 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm16
2741 ; AVX512F-FAST-NEXT: vmovdqa 64(%rcx), %xmm0
2742 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdx), %xmm1
2743 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2744 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2745 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
2746 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm5, %zmm16 {%k1}
2747 ; AVX512F-FAST-NEXT: vmovdqa 80(%rsi), %xmm0
2748 ; AVX512F-FAST-NEXT: vmovdqa 80(%rdi), %xmm1
2749 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2750 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm17 = xmm2[0],zero,xmm2[1],zero
2751 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2752 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm2, %ymm17, %ymm2
2753 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2754 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
2755 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2756 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2757 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm17
2758 ; AVX512F-FAST-NEXT: vmovdqa 80(%rcx), %xmm0
2759 ; AVX512F-FAST-NEXT: vmovdqa 80(%rdx), %xmm1
2760 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2761 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2762 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
2763 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm5, %zmm17 {%k1}
2764 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
2765 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
2766 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2767 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
2768 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
2769 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
2770 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2771 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2772 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
2773 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2774 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2775 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm1
2776 ; AVX512F-FAST-NEXT: vpermd %zmm1, %zmm5, %zmm0 {%k1}
2777 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2778 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
2779 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2780 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2781 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
2782 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
2783 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2784 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
2785 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
2786 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
2787 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
2788 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm6, %zmm2
2789 ; AVX512F-FAST-NEXT: vpermd %zmm2, %zmm5, %zmm1 {%k1}
2790 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm19, %xmm7
2791 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm9
2792 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
2793 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
2794 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2795 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
2796 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
2797 ; AVX512F-FAST-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
2798 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2799 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2800 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm6, %zmm2
2801 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
2802 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2803 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm3, %zmm3
2804 ; AVX512F-FAST-NEXT: vpermd %zmm3, %zmm5, %zmm2 {%k1}
2805 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, (%r8)
2806 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 192(%r8)
2807 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, 128(%r8)
2808 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm17, 320(%r8)
2809 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm16, 256(%r8)
2810 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm15, 448(%r8)
2811 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm8, 384(%r8)
2812 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm18, 64(%r8)
2813 ; AVX512F-FAST-NEXT: vzeroupper
2814 ; AVX512F-FAST-NEXT: retq
2815 ;
2816 ; AVX512DQ-SLOW-LABEL: store_i16_stride4_vf64:
2817 ; AVX512DQ-SLOW: # %bb.0:
2818 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm1
2819 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rsi), %xmm0
2820 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm10
2821 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rsi), %xmm5
2822 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm2
2823 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdi), %xmm3
2824 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm11
2825 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rdi), %xmm7
2826 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
2827 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
2828 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
2829 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm6, %ymm4
2830 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2831 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
2832 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2833 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
2834 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm17
2835 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm3
2836 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rcx), %xmm6
2837 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm12
2838 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rcx), %xmm8
2839 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm4
2840 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdx), %xmm13
2841 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm14
2842 ; AVX512DQ-SLOW-NEXT: vmovdqa 48(%rdx), %xmm9
2843 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm6[4],xmm13[5],xmm6[5],xmm13[6],xmm6[6],xmm13[7],xmm6[7]
2844 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
2845 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2846 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2847 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
2848 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm6[0,0,1,1]
2849 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2850 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm13, %ymm6
2851 ; AVX512DQ-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
2852 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
2853 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm15, %zmm6, %zmm17 {%k1}
2854 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rsi), %xmm6
2855 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdi), %xmm13
2856 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm6[4],xmm13[5],xmm6[5],xmm13[6],xmm6[6],xmm13[7],xmm6[7]
2857 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
2858 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2859 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2860 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
2861 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm6[0],zero,xmm6[1],zero
2862 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2863 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm13, %ymm6
2864 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm18
2865 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rcx), %xmm13
2866 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdx), %xmm15
2867 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm13[4],xmm15[5],xmm13[5],xmm15[6],xmm13[6],xmm15[7],xmm13[7]
2868 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm0[0,0,1,1]
2869 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2870 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm0, %ymm16, %ymm0
2871 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3]
2872 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm13[0,0,1,1]
2873 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2874 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm15, %ymm13
2875 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm0, %zmm13, %zmm18 {%k1}
2876 ; AVX512DQ-SLOW-NEXT: vmovdqa 112(%rsi), %xmm0
2877 ; AVX512DQ-SLOW-NEXT: vmovdqa 112(%rdi), %xmm13
2878 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm0[4],xmm13[5],xmm0[5],xmm13[6],xmm0[6],xmm13[7],xmm0[7]
2879 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
2880 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2881 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2882 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
2883 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm0[0],zero,xmm0[1],zero
2884 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2885 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm13, %ymm0
2886 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm19
2887 ; AVX512DQ-SLOW-NEXT: vmovdqa 112(%rcx), %xmm0
2888 ; AVX512DQ-SLOW-NEXT: vmovdqa 112(%rdx), %xmm15
2889 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
2890 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm6[0,0,1,1]
2891 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2892 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm6, %ymm16, %ymm6
2893 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
2894 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm0[0,0,1,1]
2895 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2896 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm15, %ymm0
2897 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm6, %zmm0, %zmm19 {%k1}
2898 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %xmm0
2899 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %xmm6
2900 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
2901 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
2902 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
2903 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm16, %ymm15
2904 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
2905 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
2906 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2907 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2908 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm15
2909 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %xmm0
2910 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm6
2911 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
2912 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm16 = xmm13[0,0,1,1]
2913 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2914 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm13, %ymm16, %ymm13
2915 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
2916 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
2917 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2918 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2919 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm13, %zmm0, %zmm15 {%k1}
2920 ; AVX512DQ-SLOW-NEXT: vmovdqa 80(%rsi), %xmm0
2921 ; AVX512DQ-SLOW-NEXT: vmovdqa 80(%rdi), %xmm6
2922 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
2923 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm16 = xmm13[0],zero,xmm13[1],zero
2924 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2925 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm13, %ymm16, %ymm13
2926 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
2927 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
2928 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2929 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2930 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm16
2931 ; AVX512DQ-SLOW-NEXT: vmovdqa 80(%rcx), %xmm0
2932 ; AVX512DQ-SLOW-NEXT: vmovdqa 80(%rdx), %xmm6
2933 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
2934 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm20 = xmm13[0,0,1,1]
2935 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
2936 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm13, %ymm20, %ymm13
2937 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
2938 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
2939 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2940 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2941 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm13, %zmm0, %zmm16 {%k1}
2942 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2943 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
2944 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2945 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2946 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
2947 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm6[0],zero,xmm6[1],zero
2948 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2949 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm10, %ymm6
2950 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm6, %zmm10
2951 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
2952 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
2953 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2954 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2955 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
2956 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm6[0,0,1,1]
2957 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2958 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm11, %ymm6
2959 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm0, %zmm6, %zmm10 {%k1}
2960 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
2961 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
2962 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2963 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm6, %ymm0
2964 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
2965 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
2966 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2967 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2968 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm5, %zmm0
2969 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
2970 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
2971 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2972 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2973 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
2974 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
2975 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
2976 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2977 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm5, %zmm6, %zmm0 {%k1}
2978 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2979 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
2980 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
2981 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2982 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2983 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
2984 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2985 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
2986 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm1
2987 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
2988 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
2989 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2990 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
2991 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2992 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
2993 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2994 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
2995 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm2, %zmm3, %zmm1 {%k1}
2996 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, (%r8)
2997 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 192(%r8)
2998 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 128(%r8)
2999 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 320(%r8)
3000 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm15, 256(%r8)
3001 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 448(%r8)
3002 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm18, 384(%r8)
3003 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 64(%r8)
3004 ; AVX512DQ-SLOW-NEXT: vzeroupper
3005 ; AVX512DQ-SLOW-NEXT: retq
3006 ;
3007 ; AVX512BW-LABEL: store_i16_stride4_vf64:
3008 ; AVX512BW: # %bb.0:
3009 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
3010 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
3011 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm2
3012 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm3
3013 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm4
3014 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm5
3015 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm6
3016 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm7
3017 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [u,u,8,40,u,u,9,41,u,u,10,42,u,u,11,43,u,u,12,44,u,u,13,45,u,u,14,46,u,u,15,47]
3018 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm9
3019 ; AVX512BW-NEXT: vpermt2w %zmm6, %zmm8, %zmm9
3020 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [8,40,u,u,9,41,u,u,10,42,u,u,11,43,u,u,12,44,u,u,13,45,u,u,14,46,u,u,15,47,u,u]
3021 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm11
3022 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm10, %zmm11
3023 ; AVX512BW-NEXT: movw $-21846, %ax # imm = 0xAAAA
3024 ; AVX512BW-NEXT: kmovd %eax, %k1
3025 ; AVX512BW-NEXT: vmovdqa32 %zmm9, %zmm11 {%k1}
3026 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [u,u,0,32,u,u,1,33,u,u,2,34,u,u,3,35,u,u,4,36,u,u,5,37,u,u,6,38,u,u,7,39]
3027 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm12
3028 ; AVX512BW-NEXT: vpermt2w %zmm6, %zmm9, %zmm12
3029 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,32,u,u,1,33,u,u,2,34,u,u,3,35,u,u,4,36,u,u,5,37,u,u,6,38,u,u,7,39,u,u]
3030 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14
3031 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm13, %zmm14
3032 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm14 {%k1}
3033 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [u,u,24,56,u,u,25,57,u,u,26,58,u,u,27,59,u,u,28,60,u,u,29,61,u,u,30,62,u,u,31,63]
3034 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm15
3035 ; AVX512BW-NEXT: vpermt2w %zmm6, %zmm12, %zmm15
3036 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm16 = [24,56,u,u,25,57,u,u,26,58,u,u,27,59,u,u,28,60,u,u,29,61,u,u,30,62,u,u,31,63,u,u]
3037 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm17
3038 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm16, %zmm17
3039 ; AVX512BW-NEXT: vmovdqa32 %zmm15, %zmm17 {%k1}
3040 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = [u,u,16,48,u,u,17,49,u,u,18,50,u,u,19,51,u,u,20,52,u,u,21,53,u,u,22,54,u,u,23,55]
3041 ; AVX512BW-NEXT: vpermt2w %zmm6, %zmm15, %zmm4
3042 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [16,48,u,u,17,49,u,u,18,50,u,u,19,51,u,u,20,52,u,u,21,53,u,u,22,54,u,u,23,55,u,u]
3043 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm6, %zmm0
3044 ; AVX512BW-NEXT: vmovdqa32 %zmm4, %zmm0 {%k1}
3045 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm5, %zmm8
3046 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm1, %zmm10
3047 ; AVX512BW-NEXT: vmovdqa32 %zmm8, %zmm10 {%k1}
3048 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm5, %zmm9
3049 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm1, %zmm13
3050 ; AVX512BW-NEXT: vmovdqa32 %zmm9, %zmm13 {%k1}
3051 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm5, %zmm12
3052 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm1, %zmm16
3053 ; AVX512BW-NEXT: vmovdqa32 %zmm12, %zmm16 {%k1}
3054 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm15, %zmm5
3055 ; AVX512BW-NEXT: vpermt2w %zmm3, %zmm6, %zmm1
3056 ; AVX512BW-NEXT: vmovdqa32 %zmm5, %zmm1 {%k1}
3057 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 384(%r8)
3058 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 448(%r8)
3059 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 256(%r8)
3060 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 320(%r8)
3061 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%r8)
3062 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 192(%r8)
3063 ; AVX512BW-NEXT: vmovdqa64 %zmm14, (%r8)
3064 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 64(%r8)
3065 ; AVX512BW-NEXT: vzeroupper
3066 ; AVX512BW-NEXT: retq
3067 %in.vec0 = load <64 x i16>, ptr %in.vecptr0, align 64
3068 %in.vec1 = load <64 x i16>, ptr %in.vecptr1, align 64
3069 %in.vec2 = load <64 x i16>, ptr %in.vecptr2, align 64
3070 %in.vec3 = load <64 x i16>, ptr %in.vecptr3, align 64
3071 %1 = shufflevector <64 x i16> %in.vec0, <64 x i16> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3072 %2 = shufflevector <64 x i16> %in.vec2, <64 x i16> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3073 %3 = shufflevector <128 x i16> %1, <128 x i16> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
3074 %interleaved.vec = shufflevector <256 x i16> %3, <256 x i16> poison, <256 x i32> <i32 0, i32 64, i32 128, i32 192, i32 1, i32 65, i32 129, i32 193, i32 2, i32 66, i32 130, i32 194, i32 3, i32 67, i32 131, i32 195, i32 4, i32 68, i32 132, i32 196, i32 5, i32 69, i32 133, i32 197, i32 6, i32 70, i32 134, i32 198, i32 7, i32 71, i32 135, i32 199, i32 8, i32 72, i32 136, i32 200, i32 9, i32 73, i32 137, i32 201, i32 10, i32 74, i32 138, i32 202, i32 11, i32 75, i32 139, i32 203, i32 12, i32 76, i32 140, i32 204, i32 13, i32 77, i32 141, i32 205, i32 14, i32 78, i32 142, i32 206, i32 15, i32 79, i32 143, i32 207, i32 16, i32 80, i32 144, i32 208, i32 17, i32 81, i32 145, i32 209, i32 18, i32 82, i32 146, i32 210, i32 19, i32 83, i32 147, i32 211, i32 20, i32 84, i32 148, i32 212, i32 21, i32 85, i32 149, i32 213, i32 22, i32 86, i32 150, i32 214, i32 23, i32 87, i32 151, i32 215, i32 24, i32 88, i32 152, i32 216, i32 25, i32 89, i32 153, i32 217, i32 26, i32 90, i32 154, i32 218, i32 27, i32 91, i32 155, i32 219, i32 28, i32 92, i32 156, i32 220, i32 29, i32 93, i32 157, i32 221, i32 30, i32 94, i32 158, i32 222, i32 31, i32 95, i32 159, i32 223, i32 32, i32 96, i32 160, i32 224, i32 33, i32 97, i32 161, i32 225, i32 34, i32 98, i32 162, i32 226, i32 35, i32 99, i32 163, i32 227, i32 36, i32 100, i32 164, i32 228, i32 37, i32 101, i32 165, i32 229, i32 38, i32 102, i32 166, i32 230, i32 39, i32 103, i32 167, i32 231, i32 40, i32 104, i32 168, i32 232, i32 41, i32 105, i32 169, i32 233, i32 42, i32 106, i32 170, i32 234, i32 43, i32 107, i32 171, i32 235, i32 44, i32 108, i32 172, i32 236, i32 45, i32 109, i32 173, i32 237, i32 46, i32 110, i32 174, i32 238, i32 47, i32 111, i32 175, i32 239, i32 48, i32 112, i32 176, i32 240, i32 49, i32 113, i32 177, i32 241, i32 50, i32 114, i32 178, i32 242, i32 51, i32 115, i32 179, i32 243, i32 52, i32 116, i32 180, i32 244, i32 53, i32 117, i32 181, i32 245, i32 54, i32 118, i32 182, i32 246, i32 55, i32 119, i32 183, i32 247, i32 56, i32 120, i32 184, i32 248, i32 57, i32 121, i32 185, i32 249, i32 58, i32 122, i32 186, i32 250, i32 59, i32 123, i32 187, i32 251, i32 60, i32 124, i32 188, i32 252, i32 61, i32 125, i32 189, i32 253, i32 62, i32 126, i32 190, i32 254, i32 63, i32 127, i32 191, i32 255>
3075 store <256 x i16> %interleaved.vec, ptr %out.vec, align 64
3076 ret void
3077 }
3078 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
3082 ; AVX512-FAST: {{.*}}
3083 ; AVX512-SLOW: {{.*}}
3084 ; AVX512BW-FAST: {{.*}}
3085 ; AVX512BW-ONLY: {{.*}}
3086 ; AVX512BW-ONLY-FAST: {{.*}}
3087 ; AVX512BW-ONLY-SLOW: {{.*}}
3088 ; AVX512BW-SLOW: {{.*}}
3089 ; AVX512DQ-FAST: {{.*}}
3090 ; AVX512DQ-ONLY: {{.*}}
3091 ; AVX512DQBW-FAST: {{.*}}
3092 ; AVX512DQBW-ONLY: {{.*}}
3093 ; AVX512DQBW-SLOW: {{.*}}
3094 ; AVX512F-ONLY: {{.*}}
3095 ; AVX512F-ONLY-FAST: {{.*}}
3098 ; FALLBACK10: {{.*}}
3099 ; FALLBACK11: {{.*}}
3100 ; FALLBACK12: {{.*}}