; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
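; For illustration only (not checked): conceptually, a scalar loop of roughly
; the following shape produces such stride-6 interleaved stores. The array
; names a..f/out and the loop itself are a hypothetical sketch, not taken from
; the vectorizer's actual input:
;
;   for (int i = 0; i < n; i++) {
;     out[6*i+0] = a[i]; out[6*i+1] = b[i]; out[6*i+2] = c[i];
;     out[6*i+3] = d[i]; out[6*i+4] = e[i]; out[6*i+5] = f[i];
;   }
;
; The LoopVectorizer widens each source and emits the shufflevector-based
; interleaving exercised by the functions below.
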
define void @store_i16_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride6_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa (%rdx), %xmm1
; SSE-NEXT: movdqa (%r8), %xmm2
; SSE-NEXT: movdqa (%r9), %xmm3
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,4,5,4,6]
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[3,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[0,2]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,0,3,3,4,5,6,7]
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,2,0,3,4,5,6,7]
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: movq %xmm1, 16(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride6_vf2:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm3
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,5,7,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,2]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[1,3,2,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm2[4,5],xmm0[6,7]
; AVX1-ONLY-NEXT: vmovdqa %xmm0, (%rax)
; AVX1-ONLY-NEXT: vmovq %xmm1, 16(%rax)
; AVX1-ONLY-NEXT: retq
;
; AVX2-SLOW-LABEL: store_i16_stride6_vf2:
; AVX2-SLOW: # %bb.0:
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm2
; AVX2-SLOW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-SLOW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-SLOW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[1,3,2,3,4,5,6,7,9,11,10,11,12,13,14,15]
; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,6,6,7,8,9,10,11,12,14,14,15]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6,7]
; AVX2-SLOW-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-SLOW-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-SLOW-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-SLOW-NEXT: vzeroupper
; AVX2-SLOW-NEXT: retq
;
; AVX2-FAST-LABEL: store_i16_stride6_vf2:
; AVX2-FAST: # %bb.0:
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FAST-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-FAST-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FAST-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,8,9,12,13,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6,7]
; AVX2-FAST-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-FAST-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-FAST-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
;
; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf2:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FAST-PERLANE-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-FAST-PERLANE-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FAST-PERLANE-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,8,9,12,13,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6,7]
; AVX2-FAST-PERLANE-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-FAST-PERLANE-NEXT: vzeroupper
; AVX2-FAST-PERLANE-NEXT: retq
;
; AVX512F-SLOW-LABEL: store_i16_stride6_vf2:
; AVX512F-SLOW: # %bb.0:
; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm2
; AVX512F-SLOW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512F-SLOW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512F-SLOW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[1,3,2,3,4,5,6,7,9,11,10,11,12,13,14,15]
; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,6,6,7,8,9,10,11,12,14,14,15]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6,7]
; AVX512F-SLOW-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512F-SLOW-NEXT: vmovq %xmm1, 16(%rax)
; AVX512F-SLOW-NEXT: vmovdqa %xmm0, (%rax)
; AVX512F-SLOW-NEXT: vzeroupper
; AVX512F-SLOW-NEXT: retq
;
; AVX512F-FAST-LABEL: store_i16_stride6_vf2:
; AVX512F-FAST: # %bb.0:
; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm2
; AVX512F-FAST-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512F-FAST-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512F-FAST-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13,u,u,u,u,2,3,6,7,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,8,9,12,13,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6,7]
; AVX512F-FAST-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512F-FAST-NEXT: vmovq %xmm1, 16(%rax)
; AVX512F-FAST-NEXT: vmovdqa %xmm0, (%rax)
; AVX512F-FAST-NEXT: vzeroupper
; AVX512F-FAST-NEXT: retq
;
; AVX512BW-LABEL: store_i16_stride6_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = <0,2,4,6,8,10,1,3,5,7,9,11,u,u,u,u>
; AVX512BW-NEXT: vpermw %ymm0, %ymm1, %ymm0
; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512BW-NEXT: vmovq %xmm1, 16(%rax)
; AVX512BW-NEXT: vmovdqa %xmm0, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <2 x i16>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i16>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i16>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i16>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i16>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i16>, ptr %in.vecptr5, align 64
  %1 = shufflevector <2 x i16> %in.vec0, <2 x i16> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i16> %in.vec2, <2 x i16> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i16> %in.vec4, <2 x i16> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i16> %1, <4 x i16> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <4 x i16> %3, <4 x i16> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i16> %4, <8 x i16> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %interleaved.vec = shufflevector <12 x i16> %6, <12 x i16> poison, <12 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11>
  store <12 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i16_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride6_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
; SSE-NEXT: movdqa %xmm3, %xmm4
; SSE-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm2[0]
; SSE-NEXT: movdqa %xmm1, %xmm5
; SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm0[0]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,6,4,6,7]
; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,1],xmm0[1,3]
; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm6[0,2]
; SSE-NEXT: movdqa %xmm0, %xmm6
; SSE-NEXT: punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm1[1]
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm1[1,1]
; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,0],xmm6[0,2]
; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm1[3,3]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[3,1,1,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,0,2,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,7]
; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
; SSE-NEXT: movaps %xmm3, 16(%rax)
; SSE-NEXT: movaps %xmm5, (%rax)
; SSE-NEXT: movaps %xmm0, 32(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride6_vf4:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm5 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm5[0],xmm4[0]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm3[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[0,1,1,3]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,6,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3],xmm8[4,5],xmm7[6,7]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm7[0,1],xmm4[2,3],xmm7[4,5,6,7]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[0,1,2,0]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,4,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm2[4,5],xmm1[6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[3,1,2,3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[3,1,2,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,1,4,5,6,7]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm6[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3,4,5],xmm2[6,7]
; AVX1-ONLY-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-SLOW-LABEL: store_i16_stride6_vf4:
; AVX2-SLOW: # %bb.0:
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm4[0],xmm3[0]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0],ymm2[1],ymm6[2,3,4,5],ymm2[6],ymm6[7]
; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
; AVX2-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[3,1,2,3,4,5,6,7]
; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,1,4,5,6,7]
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3]
; AVX2-SLOW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-SLOW-NEXT: vzeroupper
; AVX2-SLOW-NEXT: retq
;
; AVX2-FAST-LABEL: store_i16_stride6_vf4:
; AVX2-FAST: # %bb.0:
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm4[0],xmm3[0]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [4,6,1,3,4,6,1,3]
; AVX2-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm2, %ymm7, %ymm2
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,0,1,4,5,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,28,29,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0],ymm2[1],ymm6[2,3,4,5],ymm2[6],ymm6[7]
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,7,14,15,4,5,6,7,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[12,13,14,15,6,7,14,15,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3]
; AVX2-FAST-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
;
; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf4:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm4[0],xmm3[0]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0],ymm2[1],ymm6[2,3,4,5],ymm2[6],ymm6[7]
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm3, %ymm3
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,7,14,15,4,5,6,7,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[12,13,14,15,6,7,14,15,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm1 = xmm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FAST-PERLANE-NEXT: vzeroupper
; AVX2-FAST-PERLANE-NEXT: retq
;
; AVX512F-SLOW-LABEL: store_i16_stride6_vf4:
; AVX512F-SLOW: # %bb.0:
; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4,5],ymm3[6],ymm4[7]
; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,2,1,3,4,5,6,7]
; AVX512F-SLOW-NEXT: vpbroadcastq %xmm4, %ymm4
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[3,1,2,3,4,5,6,7]
; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,1,4,5,6,7]
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3]
; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512F-SLOW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512F-SLOW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512F-SLOW-NEXT: vzeroupper
; AVX512F-SLOW-NEXT: retq
;
; AVX512F-FAST-LABEL: store_i16_stride6_vf4:
; AVX512F-FAST: # %bb.0:
; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm3[0],xmm2[0]
; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm5
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm5[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [4,6,1,3,4,6,1,3]
; AVX512F-FAST-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm5, %ymm7, %ymm5
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,0,1,4,5,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,28,29,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4,5],ymm5[6],ymm6[7]
; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,8,3,4,9,6,7]
; AVX512F-FAST-NEXT: vpermi2d %ymm2, %ymm5, %ymm3
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,7,14,15,4,5,6,7,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[12,13,14,15,6,7,14,15,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm4[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3]
; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512F-FAST-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512F-FAST-NEXT: vmovdqa %ymm1, (%rax)
; AVX512F-FAST-NEXT: vzeroupper
; AVX512F-FAST-NEXT: retq
;
; AVX512BW-LABEL: store_i16_stride6_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,4,8,12,16,20,1,5,9,13,17,21,2,6,10,14,18,22,3,7,11,15,19,23,u,u,u,u,u,u,u,u>
; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
; AVX512BW-NEXT: vmovdqa %ymm0, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <4 x i16>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i16>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i16>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i16>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i16>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i16>, ptr %in.vecptr5, align 64
  %1 = shufflevector <4 x i16> %in.vec0, <4 x i16> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i16> %in.vec2, <4 x i16> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i16> %in.vec4, <4 x i16> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i16> %1, <8 x i16> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <8 x i16> %3, <8 x i16> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <16 x i16> %4, <16 x i16> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
  %interleaved.vec = shufflevector <24 x i16> %6, <24 x i16> poison, <24 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23>
  store <24 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i16_stride6_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride6_vf8:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa (%rsi), %xmm8
; SSE-NEXT: movdqa (%rdx), %xmm1
; SSE-NEXT: movdqa (%rcx), %xmm9
; SSE-NEXT: movdqa (%r8), %xmm6
; SSE-NEXT: movdqa (%r9), %xmm5
; SSE-NEXT: movdqa %xmm1, %xmm4
; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm9[4],xmm4[5],xmm9[5],xmm4[6],xmm9[6],xmm4[7],xmm9[7]
; SSE-NEXT: movdqa %xmm0, %xmm7
; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
; SSE-NEXT: movdqa %xmm7, %xmm10
; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,3],xmm4[3,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm6[0,1,2,3,6,5,7,7]
; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[1,2],xmm2[2,3]
; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,0,1,3]
; SSE-NEXT: movaps {{.*#+}} xmm2 = [65535,0,65535,65535,65535,65535,65535,0]
; SSE-NEXT: andps %xmm2, %xmm10
; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm3[2,1,2,3]
; SSE-NEXT: movaps %xmm2, %xmm3
; SSE-NEXT: andnps %xmm11, %xmm3
; SSE-NEXT: orps %xmm10, %xmm3
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
; SSE-NEXT: movdqa %xmm0, %xmm8
; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm1[3,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm6[2,1,3,3,4,5,6,7]
; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,2],xmm9[0,1]
; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0,1,3]
; SSE-NEXT: andps %xmm2, %xmm8
; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm5[0,2,2,3,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
; SSE-NEXT: andnps %xmm9, %xmm2
; SSE-NEXT: orps %xmm8, %xmm2
; SSE-NEXT: movdqa %xmm1, %xmm10
; SSE-NEXT: punpcklqdq {{.*#+}} xmm10 = xmm10[0],xmm0[0]
; SSE-NEXT: movdqa %xmm6, %xmm8
; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm0[1,3]
; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,0],xmm8[0,2]
; SSE-NEXT: movaps {{.*#+}} xmm8 = [65535,65535,65535,65535,65535,0,65535,65535]
; SSE-NEXT: andps %xmm8, %xmm10
; SSE-NEXT: movdqa %xmm5, %xmm11
; SSE-NEXT: pslldq {{.*#+}} xmm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm11[0,1,2,3,4,5]
; SSE-NEXT: movaps %xmm8, %xmm9
; SSE-NEXT: andnps %xmm11, %xmm9
; SSE-NEXT: orps %xmm10, %xmm9
; SSE-NEXT: movdqa %xmm7, %xmm10
; SSE-NEXT: punpckhqdq {{.*#+}} xmm10 = xmm10[1],xmm4[1]
; SSE-NEXT: movdqa %xmm6, %xmm12
; SSE-NEXT: psrldq {{.*#+}} xmm12 = xmm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,1],xmm4[1,1]
; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm10[0,2]
; SSE-NEXT: movaps {{.*#+}} xmm10 = [65535,65535,65535,0,65535,65535,65535,65535]
; SSE-NEXT: andps %xmm10, %xmm12
; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm5[2,2,3,3]
; SSE-NEXT: movaps %xmm10, %xmm11
; SSE-NEXT: andnps %xmm13, %xmm11
; SSE-NEXT: orps %xmm12, %xmm11
; SSE-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm7[0]
; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm6[1,1,1,1,4,5,6,7]
; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,1],xmm7[1,3]
; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm6[0,2]
; SSE-NEXT: andps %xmm8, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
; SSE-NEXT: pslld $16, %xmm5
; SSE-NEXT: andnps %xmm5, %xmm8
; SSE-NEXT: orps %xmm4, %xmm8
; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,1],xmm1[1,1]
; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm0[0,2]
; SSE-NEXT: andps %xmm10, %xmm12
; SSE-NEXT: andnps %xmm6, %xmm10
; SSE-NEXT: orps %xmm12, %xmm10
; SSE-NEXT: movaps %xmm10, 16(%rax)
; SSE-NEXT: movaps %xmm8, 48(%rax)
; SSE-NEXT: movaps %xmm11, 64(%rax)
; SSE-NEXT: movaps %xmm9, (%rax)
; SSE-NEXT: movaps %xmm2, 32(%rax)
; SSE-NEXT: movaps %xmm3, 80(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride6_vf8:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm1
; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm4
; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm5
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[1,1,2,2]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2,3],xmm6[4,5],xmm8[6,7]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm7[0,0,1,1]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm6[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3],xmm11[4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2,3],xmm11[4,5],xmm10[6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6,7]
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm4[4,5],xmm1[6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[2,2,3,3]
; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm6[1],xmm4[1]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm9[2,3,2,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,2,3,3]
; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm0[1],xmm4[1]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[2,3,2,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,2]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3],xmm0[4,5],xmm2[6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm3[2,3],xmm0[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm8, (%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-SLOW-LABEL: store_i16_stride6_vf8:
; AVX2-SLOW: # %bb.0:
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm2
; AVX2-SLOW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-SLOW-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6],ymm3[7]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,2,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,2,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u,18,19,26,27]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,u,u,u,u,22,23,30,31]
; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
; AVX2-SLOW-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-SLOW-NEXT: vzeroupper
; AVX2-SLOW-NEXT: retq
;
; AVX2-FAST-LABEL: store_i16_stride6_vf8:
; AVX2-FAST: # %bb.0:
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FAST-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FAST-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,4,1,5,0,4,1,5]
; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,0,1,4,5,u,u,u,u,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u,24,25,28,29]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6],ymm3[7]
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,4,4,0,0,4,4,0]
; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm2, %ymm4, %ymm4
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,12,13,8,9,u,u,u,u,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [2,6,1,5,2,6,1,5]
; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm1, %ymm4, %ymm4
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,10,11,14,15,u,u,u,u,u,u,u,u,16,17,20,21,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,2,1,3]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u,18,19,26,27]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [7,3,3,7,7,3,3,7]
; AVX2-FAST-NEXT: # ymm5 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm0, %ymm5, %ymm0
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,8,9,12,13,u,u,u,u,u,u,u,u,22,23,18,19,u,u,u,u,u,u,u,u]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,u,u,u,u,22,23,30,31]
; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
; AVX2-FAST-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
;
; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf8:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6],ymm3[7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,2,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,2,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u,18,19,26,27]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,u,u,u,u,22,23,30,31]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-FAST-PERLANE-NEXT: vzeroupper
; AVX2-FAST-PERLANE-NEXT: retq
;
; AVX512F-SLOW-LABEL: store_i16_stride6_vf8:
; AVX512F-SLOW: # %bb.0:
; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm2
; AVX512F-SLOW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512F-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512F-SLOW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u,18,19,26,27]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,2,0,2]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u,20,21,28,29]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,2,0,2]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6],ymm4[7]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,0,2]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,u,u,u,u,22,23,30,31]
; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
; AVX512F-SLOW-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX512F-SLOW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512F-SLOW-NEXT: vzeroupper
; AVX512F-SLOW-NEXT: retq
;
; AVX512F-FAST-LABEL: store_i16_stride6_vf8:
; AVX512F-FAST: # %bb.0:
; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm2
; AVX512F-FAST-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512F-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512F-FAST-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,6,1,5,2,6,1,5]
; AVX512F-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,10,11,14,15,u,u,u,u,u,u,u,u,16,17,20,21,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u,u,u,u,u,18,19,26,27]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,1,3]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[4,5,12,13,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,4,1,5,0,4,1,5]
; AVX512F-FAST-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm4, %ymm4
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,1,4,5,u,u,u,u,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u,24,25,28,29]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,2,0,2]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,8,9,u,u,u,u,u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6],ymm4[7]
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [0,4,4,0,0,4,4,0]
; AVX512F-FAST-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm5, %ymm5
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,u,u,u,u,12,13,8,9,u,u,u,u,u,u,u,u,18,19,22,23,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [7,3,3,7,7,3,3,7]
; AVX512F-FAST-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm4, %ymm0
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,8,9,12,13,u,u,u,u,u,u,u,u,22,23,18,19,u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,3,10,11,u,u,u,u,u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,2,3,10,11,u,u,u,u,u,u,u,u,20,21,28,29,u,u,u,u,u,u,u,u,22,23,30,31]
; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
; AVX512F-FAST-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX512F-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512F-FAST-NEXT: vzeroupper
; AVX512F-FAST-NEXT: retq
;
; AVX512BW-LABEL: store_i16_stride6_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512BW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm1
; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm2 = [21,29,37,45,6,14,22,30,38,46,7,15,23,31,39,47]
; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,8,16,24,32,40,1,9,17,25,33,41,2,10,18,26,34,42,3,11,19,27,35,43,4,12,20,28,36,44,5,13]
; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
; AVX512BW-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-NEXT: vmovdqa %ymm2, 64(%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
846 %in.vec0 = load <8 x i16>, ptr %in.vecptr0, align 64
847 %in.vec1 = load <8 x i16>, ptr %in.vecptr1, align 64
848 %in.vec2 = load <8 x i16>, ptr %in.vecptr2, align 64
849 %in.vec3 = load <8 x i16>, ptr %in.vecptr3, align 64
850 %in.vec4 = load <8 x i16>, ptr %in.vecptr4, align 64
851 %in.vec5 = load <8 x i16>, ptr %in.vecptr5, align 64
852 %1 = shufflevector <8 x i16> %in.vec0, <8 x i16> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
853 %2 = shufflevector <8 x i16> %in.vec2, <8 x i16> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
854 %3 = shufflevector <8 x i16> %in.vec4, <8 x i16> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
855 %4 = shufflevector <16 x i16> %1, <16 x i16> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
856 %5 = shufflevector <16 x i16> %3, <16 x i16> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
857 %6 = shufflevector <32 x i16> %4, <32 x i16> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
858 %interleaved.vec = shufflevector <48 x i16> %6, <48 x i16> poison, <48 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47>
859 store <48 x i16> %interleaved.vec, ptr %out.vec, align 64
860 ret void
861 }
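; vf16 repeats the same stride-6 interleave with <16 x i16> inputs, so the
; lowerings below assemble a 192-byte result and write it out in 16-, 32-, or
; 64-byte chunks depending on the vector width available.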
863 define void @store_i16_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
864 ; SSE-LABEL: store_i16_stride6_vf16:
865 ; SSE: # %bb.0:
866 ; SSE-NEXT: subq $24, %rsp
867 ; SSE-NEXT: movdqa (%rdi), %xmm15
868 ; SSE-NEXT: movdqa 16(%rdi), %xmm11
869 ; SSE-NEXT: movdqa (%rsi), %xmm10
870 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
871 ; SSE-NEXT: movdqa 16(%rsi), %xmm4
872 ; SSE-NEXT: movdqa (%rdx), %xmm14
873 ; SSE-NEXT: movdqa 16(%rdx), %xmm12
874 ; SSE-NEXT: movdqa (%rcx), %xmm3
875 ; SSE-NEXT: movdqa 16(%rcx), %xmm2
876 ; SSE-NEXT: movdqa 16(%r8), %xmm0
877 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
878 ; SSE-NEXT: movdqa 16(%r9), %xmm8
879 ; SSE-NEXT: movdqa %xmm12, %xmm6
880 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
881 ; SSE-NEXT: movdqa %xmm11, %xmm5
882 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
883 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
884 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,3],xmm6[3,3]
885 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,1,3,3,4,5,6,7]
886 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,2],xmm0[0,1]
887 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0,1,3]
888 ; SSE-NEXT: movaps {{.*#+}} xmm7 = [65535,0,65535,65535,65535,65535,65535,0]
889 ; SSE-NEXT: andps %xmm7, %xmm5
890 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm8[0,2,2,3,4,5,6,7]
891 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
892 ; SSE-NEXT: movaps %xmm7, %xmm0
893 ; SSE-NEXT: andnps %xmm9, %xmm0
894 ; SSE-NEXT: orps %xmm5, %xmm0
895 ; SSE-NEXT: movaps %xmm0, (%rsp) # 16-byte Spill
896 ; SSE-NEXT: movdqa %xmm14, %xmm5
897 ; SSE-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
898 ; SSE-NEXT: movdqa %xmm15, %xmm13
899 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm10[4],xmm13[5],xmm10[5],xmm13[6],xmm10[6],xmm13[7],xmm10[7]
900 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
901 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,3],xmm5[3,3]
902 ; SSE-NEXT: movdqa (%r8), %xmm10
903 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm10[0,1,2,3,6,5,7,7]
904 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[1,2],xmm9[2,3]
905 ; SSE-NEXT: movdqa (%r9), %xmm0
906 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,6,6,7]
907 ; SSE-NEXT: movdqa %xmm0, %xmm9
908 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
909 ; SSE-NEXT: movaps %xmm7, %xmm0
910 ; SSE-NEXT: andnps %xmm1, %xmm0
911 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,0,1,3]
912 ; SSE-NEXT: andps %xmm7, %xmm13
913 ; SSE-NEXT: orps %xmm13, %xmm0
914 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
915 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm2[4],xmm12[5],xmm2[5],xmm12[6],xmm2[6],xmm12[7],xmm2[7]
916 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm4[4],xmm11[5],xmm4[5],xmm11[6],xmm4[6],xmm11[7],xmm4[7]
917 ; SSE-NEXT: movdqa %xmm11, %xmm1
918 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm12[3,3]
919 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
920 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm13[0,1,2,3,6,5,7,7]
921 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm4[2,3]
922 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
923 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm8[0,1,2,3,4,6,6,7]
924 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,2,3]
925 ; SSE-NEXT: movaps %xmm7, %xmm0
926 ; SSE-NEXT: andnps %xmm4, %xmm0
927 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
928 ; SSE-NEXT: andps %xmm7, %xmm1
929 ; SSE-NEXT: orps %xmm1, %xmm0
930 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
931 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm3[0],xmm14[1],xmm3[1],xmm14[2],xmm3[2],xmm14[3],xmm3[3]
932 ; SSE-NEXT: punpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
933 ; SSE-NEXT: # xmm15 = xmm15[0],mem[0],xmm15[1],mem[1],xmm15[2],mem[2],xmm15[3],mem[3]
934 ; SSE-NEXT: movdqa %xmm15, %xmm1
935 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm14[3,3]
936 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm10[2,1,3,3,4,5,6,7]
937 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm3[0,1]
938 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
939 ; SSE-NEXT: andps %xmm7, %xmm1
940 ; SSE-NEXT: movdqa %xmm9, %xmm2
941 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
942 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm9[0,2,2,3,4,5,6,7]
943 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
944 ; SSE-NEXT: andnps %xmm3, %xmm7
945 ; SSE-NEXT: orps %xmm1, %xmm7
946 ; SSE-NEXT: movdqa %xmm12, %xmm3
947 ; SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm11[0]
948 ; SSE-NEXT: movdqa %xmm13, %xmm1
949 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,1],xmm11[1,3]
950 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,0],xmm1[0,2]
951 ; SSE-NEXT: pslld $16, %xmm8
952 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,65535,65535,0,65535,65535]
953 ; SSE-NEXT: movdqa %xmm1, %xmm13
954 ; SSE-NEXT: pandn %xmm8, %xmm13
955 ; SSE-NEXT: andps %xmm1, %xmm3
956 ; SSE-NEXT: por %xmm3, %xmm13
957 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
958 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm5[1]
959 ; SSE-NEXT: movdqa %xmm10, %xmm4
960 ; SSE-NEXT: psrldq {{.*#+}} xmm4 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
961 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm5[1,1]
962 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm3[0,2]
963 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,65535,0,65535,65535,65535,65535]
964 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,3,3]
965 ; SSE-NEXT: movdqa %xmm3, %xmm9
966 ; SSE-NEXT: pandn %xmm0, %xmm9
967 ; SSE-NEXT: andps %xmm3, %xmm4
968 ; SSE-NEXT: por %xmm4, %xmm9
969 ; SSE-NEXT: movdqa %xmm15, %xmm0
970 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm14[1]
971 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm10[1,1,1,1,4,5,6,7]
972 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm14[1,1]
973 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm0[0,2]
974 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,0,1,1]
975 ; SSE-NEXT: movdqa %xmm3, %xmm8
976 ; SSE-NEXT: pandn %xmm0, %xmm8
977 ; SSE-NEXT: andps %xmm3, %xmm4
978 ; SSE-NEXT: por %xmm4, %xmm8
979 ; SSE-NEXT: punpcklqdq {{.*#+}} xmm14 = xmm14[0],xmm15[0]
980 ; SSE-NEXT: movdqa %xmm10, %xmm0
981 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm15[1,3]
982 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,0],xmm0[0,2]
983 ; SSE-NEXT: movdqa %xmm2, %xmm0
984 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5]
985 ; SSE-NEXT: movdqa %xmm1, %xmm15
986 ; SSE-NEXT: pandn %xmm0, %xmm15
987 ; SSE-NEXT: andps %xmm1, %xmm14
988 ; SSE-NEXT: por %xmm14, %xmm15
989 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1],xmm12[1]
990 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
991 ; SSE-NEXT: movdqa %xmm14, %xmm0
992 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
993 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm12[1,1]
994 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm11[0,2]
995 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
996 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm12[2,2,3,3]
997 ; SSE-NEXT: movdqa %xmm3, %xmm11
998 ; SSE-NEXT: pandn %xmm4, %xmm11
999 ; SSE-NEXT: andps %xmm3, %xmm0
1000 ; SSE-NEXT: por %xmm0, %xmm11
1001 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1002 ; SSE-NEXT: movaps %xmm2, %xmm0
1003 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm6[1]
1004 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm14[1,1,1,1,4,5,6,7]
1005 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,1],xmm6[1,1]
1006 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm0[0,2]
1007 ; SSE-NEXT: andps %xmm3, %xmm4
1008 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm12[0,0,1,1]
1009 ; SSE-NEXT: pandn %xmm0, %xmm3
1010 ; SSE-NEXT: por %xmm4, %xmm3
1011 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm2[0]
1012 ; SSE-NEXT: movdqa %xmm14, %xmm0
1013 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[1,3]
1014 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm0[0,2]
1015 ; SSE-NEXT: movdqa %xmm12, %xmm4
1016 ; SSE-NEXT: pslldq {{.*#+}} xmm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm4[0,1,2,3,4,5]
1017 ; SSE-NEXT: movdqa %xmm1, %xmm0
1018 ; SSE-NEXT: pandn %xmm4, %xmm0
1019 ; SSE-NEXT: andps %xmm1, %xmm6
1020 ; SSE-NEXT: por %xmm6, %xmm0
1021 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1022 ; SSE-NEXT: movlhps {{.*#+}} xmm5 = xmm5[0],xmm2[0]
1023 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[2,1],xmm2[1,3]
1024 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm10[0,2]
1025 ; SSE-NEXT: andps %xmm1, %xmm5
1026 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1027 ; SSE-NEXT: pslld $16, %xmm2
1028 ; SSE-NEXT: pandn %xmm2, %xmm1
1029 ; SSE-NEXT: por %xmm5, %xmm1
1030 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1031 ; SSE-NEXT: movdqa %xmm1, 48(%rax)
1032 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
1033 ; SSE-NEXT: movdqa %xmm3, 112(%rax)
1034 ; SSE-NEXT: movdqa %xmm11, 160(%rax)
1035 ; SSE-NEXT: movdqa %xmm15, (%rax)
1036 ; SSE-NEXT: movdqa %xmm8, 16(%rax)
1037 ; SSE-NEXT: movdqa %xmm9, 64(%rax)
1038 ; SSE-NEXT: movdqa %xmm13, 144(%rax)
1039 ; SSE-NEXT: movaps %xmm7, 32(%rax)
1040 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1041 ; SSE-NEXT: movaps %xmm0, 176(%rax)
1042 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1043 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1044 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
1045 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1046 ; SSE-NEXT: addq $24, %rsp
1047 ; SSE-NEXT: retq
1048 ;
1049 ; AVX1-ONLY-LABEL: store_i16_stride6_vf16:
1050 ; AVX1-ONLY: # %bb.0:
1051 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
1052 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm0
1053 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm4
1054 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm1
1055 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1056 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[2,2,3,3]
1057 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
1058 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
1059 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1060 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm5
1061 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm2
1062 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
1063 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm7
1064 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
1065 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[2,3,2,3]
1066 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
1067 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[0,1,0,1]
1068 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm10, %ymm7
1069 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm7[0,1],ymm1[2],ymm7[3,4],ymm1[5],ymm7[6,7]
1070 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm7
1071 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm10
1072 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3],xmm10[4,5],xmm7[6,7]
1073 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm11
1074 ; AVX1-ONLY-NEXT: vpslld $16, %xmm11, %xmm12
1075 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4],xmm12[5],xmm7[6,7]
1076 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1077 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm10[2,1,3,3,4,5,6,7]
1078 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
1079 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm7[0],xmm1[1,2],xmm7[3]
1080 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm11[0,2,2,3,4,5,6,7]
1081 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
1082 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm7[1],xmm1[2,3,4,5,6],xmm7[7]
1083 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1084 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,3,2,3]
1085 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1086 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
1087 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1088 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
1089 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
1090 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm1 = xmm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1091 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0],xmm1[1],xmm0[2,3]
1092 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[2,2,3,3]
1093 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3],xmm1[4,5,6,7]
1094 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1095 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
1096 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm10[0,1,2,3,6,5,7,7]
1097 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1098 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3,4,5],xmm1[6,7]
1099 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,4,6,6,7]
1100 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1101 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6],xmm1[7]
1102 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1103 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1104 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[1,1,2,2]
1105 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[2,2,3,3]
1106 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm0
1107 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm13[2,3,2,3]
1108 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
1109 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm0[0,1],ymm12[2],ymm0[3,4],ymm12[5],ymm0[6,7]
1110 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm14
1111 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,6,5,7,7]
1112 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
1113 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm15, %xmm12
1114 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm0[0,1],xmm12[2,3,4,5],xmm0[6,7]
1115 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
1116 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,6,6,7]
1117 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,1,2,3]
1118 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm2[1],xmm12[2,3,4,5,6],xmm2[7]
1119 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm2 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1120 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm15[0],xmm2[1],xmm15[2,3]
1121 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm0[2,2,3,3]
1122 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm15 = xmm2[0,1,2],xmm15[3],xmm2[4,5,6,7]
1123 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm8[0,0,1,1]
1124 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
1125 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm2, %ymm2
1126 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm9[0,1,0,1]
1127 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
1128 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm8[0],ymm2[1],ymm8[2,3],ymm2[4],ymm8[5,6],ymm2[7]
1129 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm8 = xmm2[0,1],xmm10[0],xmm2[3]
1130 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm11[0,1,2,3,4,5]
1131 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2,3,4],xmm9[5],xmm8[6,7]
1132 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm2
1133 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm9 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero
1134 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm9[2,3],xmm2[4,5,6,7]
1135 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[0,0,1,1]
1136 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm2[0,1,2],xmm9[3],xmm2[4,5,6,7]
1137 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1138 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
1139 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[1,1,2,2]
1140 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
1141 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
1142 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,1,0,1]
1143 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm5
1144 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7]
1145 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm5
1146 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm6 = xmm14[0],zero,xmm14[1],zero,xmm14[2],zero,xmm14[3],zero
1147 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm6[2,3],xmm5[4,5,6,7]
1148 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
1149 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3],xmm5[4,5,6,7]
1150 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm3 = xmm3[0,1],xmm14[0],xmm3[3]
1151 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5]
1152 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm6[5],xmm3[6,7]
1153 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
1154 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,0,1,1]
1155 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1156 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
1157 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm13[0,1,0,1]
1158 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
1159 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
1160 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm14[2,1,3,3,4,5,6,7]
1161 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
1162 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm2 = xmm2[0],xmm1[1,2],xmm2[3]
1163 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm0[0,2,2,3,4,5,6,7]
1164 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
1165 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1],xmm2[2,3,4,5,6],xmm4[7]
1166 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
1167 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm14[4,5],xmm1[6,7]
1168 ; AVX1-ONLY-NEXT: vpslld $16, %xmm0, %xmm0
1169 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4],xmm0[5],xmm1[6,7]
1170 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1171 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 48(%rax)
1172 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 32(%rax)
1173 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, (%rax)
1174 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, 16(%rax)
1175 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, 112(%rax)
1176 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, 96(%rax)
1177 ; AVX1-ONLY-NEXT: vmovdqa %xmm15, 64(%rax)
1178 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, 80(%rax)
1179 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, 176(%rax)
1180 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1181 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 160(%rax)
1182 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1183 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 128(%rax)
1184 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1185 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 144(%rax)
1186 ; AVX1-ONLY-NEXT: vzeroupper
1187 ; AVX1-ONLY-NEXT: retq
1188 ;
1189 ; AVX2-SLOW-LABEL: store_i16_stride6_vf16:
1190 ; AVX2-SLOW: # %bb.0:
1191 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm1
1192 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm3
1193 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm2
1194 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm4
1195 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm13
1196 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm6
1197 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm5 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1198 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm7
1199 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm8 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1200 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
1201 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm5, %ymm5
1202 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm8
1203 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm8[0,1,2,1]
1204 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm9[0,1,2,3,4,7,6,5]
1205 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm9
1206 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[0,1,2,1]
1207 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,7,6,5]
1208 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1209 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,0,1]
1210 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm10[0,1],ymm5[2],ymm10[3,4],ymm5[5],ymm10[6,7]
1211 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm10
1212 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm10[2,1,3,3,4,5,6,7]
1213 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
1214 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm11[0],ymm5[1,2],ymm11[3],ymm5[4,5],ymm11[6],ymm5[7]
1215 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm11
1216 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm11[0,2,2,3,4,5,6,7]
1217 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,4,4,4]
1218 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,2,1]
1219 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
1220 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm5, %ymm12, %ymm5
1221 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1222 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm12 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1223 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm14 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1224 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm14[0],ymm12[0],ymm14[1],ymm12[1],ymm14[2],ymm12[2],ymm14[3],ymm12[3],ymm14[8],ymm12[8],ymm14[9],ymm12[9],ymm14[10],ymm12[10],ymm14[11],ymm12[11]
1225 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm3[2,1,2,3,6,5,6,7]
1226 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1227 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm1[2,1,2,3,6,5,6,7]
1228 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1229 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm15[0],ymm12[0],ymm15[1],ymm12[1],ymm15[2],ymm12[2],ymm15[3],ymm12[3],ymm15[8],ymm12[8],ymm15[9],ymm12[9],ymm15[10],ymm12[10],ymm15[11],ymm12[11]
1230 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm12
1231 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,2]
1232 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1233 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7]
1234 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm13[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
1235 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1236 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0],ymm14[1,2],ymm15[3],ymm14[4,5],ymm15[6],ymm14[7]
1237 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm12[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
1238 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
1239 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1240 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm0
1241 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1242 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
1243 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[1,1,1,1]
1244 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
1245 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,2,3,3]
1246 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,2,1]
1247 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7]
1248 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm10[12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
1249 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
1250 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm15[1],ymm14[2,3],ymm15[4],ymm14[5,6],ymm15[7]
1251 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[2,3,2,3]
1252 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[0,2,2,1,4,5,6,7]
1253 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
1254 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
1255 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
1256 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[12],ymm3[12],ymm1[13],ymm3[13],ymm1[14],ymm3[14],ymm1[15],ymm3[15]
1257 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[3,3,3,3]
1258 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[12],ymm4[12],ymm2[13],ymm4[13],ymm2[14],ymm4[14],ymm2[15],ymm4[15]
1259 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[1,2,3,3,5,6,7,7]
1260 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
1261 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm15[2],ymm5[3,4],ymm15[5],ymm5[6,7]
1262 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm15 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31]
1263 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1264 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm15[1],ymm5[2,3],ymm15[4],ymm5[5,6],ymm15[7]
1265 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm12[2,3,2,3,6,7,6,7]
1266 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
1267 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1268 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm5, %ymm15, %ymm0
1269 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
1270 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1271 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
1272 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[1,0,2,2]
1273 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
1274 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3],ymm6[4],ymm5[5,6],ymm6[7]
1275 ; AVX2-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm6 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero
1276 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm6, %ymm6
1277 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7]
1278 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm11[0,0,2,1,4,5,6,7]
1279 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm6, %ymm6
1280 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
1281 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
1282 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
1283 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11]
1284 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1285 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,0,2,2,5,4,6,6]
1286 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
1287 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
1288 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm13[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
1289 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
1290 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
1291 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm12[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
1292 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
1293 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm1, %ymm2, %ymm1
1294 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1295 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 96(%rax)
1296 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 160(%rax)
1297 ; AVX2-SLOW-NEXT: vmovdqa %ymm14, 64(%rax)
1298 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1299 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
1300 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, (%rax)
1301 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1302 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
1303 ; AVX2-SLOW-NEXT: vzeroupper
1304 ; AVX2-SLOW-NEXT: retq
1305 ;
1306 ; AVX2-FAST-LABEL: store_i16_stride6_vf16:
1307 ; AVX2-FAST: # %bb.0:
1308 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm13
1309 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm1
1310 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm3
1311 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm4
1312 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm2
1313 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm6
1314 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
1315 ; AVX2-FAST-NEXT: vpshufb %xmm5, %xmm6, %xmm8
1316 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm7
1317 ; AVX2-FAST-NEXT: vpshufb %xmm5, %xmm7, %xmm5
1318 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm5[4],xmm8[4],xmm5[5],xmm8[5],xmm5[6],xmm8[6],xmm5[7],xmm8[7]
1319 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
1320 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm8
1321 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm9 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1322 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm10
1323 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm11 = xmm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1324 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
1325 ; AVX2-FAST-NEXT: vpbroadcastq %xmm9, %ymm9
1326 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm9[2],ymm5[3,4],ymm9[5],ymm5[6,7]
1327 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm9
1328 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm11 = xmm9[2,1,3,3,4,5,6,7]
1329 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
1330 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm11[0],ymm5[1,2],ymm11[3],ymm5[4,5],ymm11[6],ymm5[7]
1331 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm11
1332 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm12 = xmm11[0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
1333 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,2,1]
1334 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
1335 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm5, %ymm12, %ymm5
1336 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1337 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
1338 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm14
1339 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm13, %ymm12
1340 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm12[0],ymm14[0],ymm12[1],ymm14[1],ymm12[2],ymm14[2],ymm12[3],ymm14[3],ymm12[8],ymm14[8],ymm12[9],ymm14[9],ymm12[10],ymm14[10],ymm12[11],ymm14[11]
1341 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm12 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1342 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm15 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1343 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm15[0],ymm12[0],ymm15[1],ymm12[1],ymm15[2],ymm12[2],ymm15[3],ymm12[3],ymm15[8],ymm12[8],ymm15[9],ymm12[9],ymm15[10],ymm12[10],ymm15[11],ymm12[11]
1344 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm12
1345 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
1346 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,2]
1347 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7]
1348 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm15 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
1349 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1350 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0],ymm14[1,2],ymm15[3],ymm14[4,5],ymm15[6],ymm14[7]
1351 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
1352 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1353 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm0
1354 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1355 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
1356 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [1,0,3,2,1,0,3,2]
1357 ; AVX2-FAST-NEXT: # ymm15 = mem[0,1,0,1]
1358 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm15, %ymm14
1359 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm15 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
1360 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[1,1,1,1]
1361 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7]
1362 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm9[12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
1363 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
1364 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm15[1],ymm14[2,3],ymm15[4],ymm14[5,6],ymm15[7]
1365 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm11[8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
1366 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
1367 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
1368 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
1369 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
1370 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [5,0,7,6,5,0,7,6]
1371 ; AVX2-FAST-NEXT: # ymm5 = mem[0,1,0,1]
1372 ; AVX2-FAST-NEXT: vpermd %ymm15, %ymm5, %ymm5
1373 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm13[4],ymm1[4],ymm13[5],ymm1[5],ymm13[6],ymm1[6],ymm13[7],ymm1[7],ymm13[12],ymm1[12],ymm13[13],ymm1[13],ymm13[14],ymm1[14],ymm13[15],ymm1[15]
1374 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[3,3,3,3]
1375 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm15[2],ymm5[3,4],ymm15[5],ymm5[6,7]
1376 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31]
1377 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1378 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm15[1],ymm5[2,3],ymm15[4],ymm5[5,6],ymm15[7]
1379 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm12[u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31]
1380 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1381 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm5, %ymm15, %ymm0
1382 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
1383 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [1,0,2,2,1,0,2,2]
1384 ; AVX2-FAST-NEXT: # ymm8 = mem[0,1,0,1]
1385 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm8, %ymm5
1386 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1387 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
1388 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3],ymm5[4],ymm6[5,6],ymm5[7]
1389 ; AVX2-FAST-NEXT: vpmovzxwd {{.*#+}} xmm6 = xmm9[0],zero,xmm9[1],zero,xmm9[2],zero,xmm9[3],zero
1390 ; AVX2-FAST-NEXT: vpbroadcastq %xmm6, %ymm6
1391 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7]
1392 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm11[0,0,2,1,4,5,6,7]
1393 ; AVX2-FAST-NEXT: vpbroadcastq %xmm6, %ymm6
1394 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
1395 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
1396 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
1397 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [5,4,0,6,5,4,0,6]
1398 ; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
1399 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm4, %ymm3
1400 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm13[0],ymm1[0],ymm13[1],ymm1[1],ymm13[2],ymm1[2],ymm13[3],ymm1[3],ymm13[8],ymm1[8],ymm13[9],ymm1[9],ymm13[10],ymm1[10],ymm13[11],ymm1[11]
1401 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1402 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3],ymm3[4],ymm1[5,6],ymm3[7]
1403 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
1404 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
1405 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
1406 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm12[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
1407 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
1408 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm1, %ymm2, %ymm1
1409 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1410 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 96(%rax)
1411 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 160(%rax)
1412 ; AVX2-FAST-NEXT: vmovdqa %ymm14, 64(%rax)
1413 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1414 ; AVX2-FAST-NEXT: vmovaps %ymm0, 128(%rax)
1415 ; AVX2-FAST-NEXT: vmovdqa %ymm5, (%rax)
1416 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1417 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
1418 ; AVX2-FAST-NEXT: vzeroupper
1419 ; AVX2-FAST-NEXT: retq
1420 ;
1421 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf16:
1422 ; AVX2-FAST-PERLANE: # %bb.0:
1423 ; AVX2-FAST-PERLANE-NEXT: subq $24, %rsp
1424 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm0
1425 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm10
1426 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm1
1427 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm2
1428 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm8
1429 ; AVX2-FAST-PERLANE-NEXT: vmovaps (%r9), %ymm3
1430 ; AVX2-FAST-PERLANE-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1431 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm7
1432 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm6
1433 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm9
1434 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1435 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm11
1436 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm5
1437 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
1438 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
1439 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[1,1,1,1]
1440 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
1441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, %xmm4
1442 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[1,2,3,3]
1443 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,0,2,1]
1444 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7]
1445 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm13 = xmm5[12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
1446 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
1447 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0],ymm13[1],ymm12[2,3],ymm13[4],ymm12[5,6],ymm13[7]
1448 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm13 = xmm3[8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
1449 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
1450 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
1451 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm9
1452 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1453 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, %ymm9
1454 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, %ymm11
1455 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm13 = ymm0[4],ymm10[4],ymm0[5],ymm10[5],ymm0[6],ymm10[6],ymm0[7],ymm10[7],ymm0[12],ymm10[12],ymm0[13],ymm10[13],ymm0[14],ymm10[14],ymm0[15],ymm10[15]
1456 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[3,3,3,3]
1457 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, %ymm0
1458 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm10
1459 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm1[4],ymm2[4],ymm1[5],ymm2[5],ymm1[6],ymm2[6],ymm1[7],ymm2[7],ymm1[12],ymm2[12],ymm1[13],ymm2[13],ymm1[14],ymm2[14],ymm1[15],ymm2[15]
1460 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[1,2,3,3,5,6,7,7]
1461 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1462 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm13 = ymm15[0,1],ymm13[2],ymm15[3,4],ymm13[5],ymm15[6,7]
1463 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm8, %ymm1
1464 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31]
1465 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1466 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0],ymm15[1],ymm13[2,3],ymm15[4],ymm13[5,6],ymm15[7]
1467 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
1468 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm8[u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31]
1469 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,2,3]
1470 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm13, %ymm15, %ymm2
1471 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1472 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1473 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, %xmm13
1474 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
1475 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1476 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
1477 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,0,2,2]
1478 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
1479 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm15[1],ymm14[2,3],ymm15[4],ymm14[5,6],ymm15[7]
1480 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm15 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
1481 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm15, %ymm15
1482 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7]
1483 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm15 = xmm3[0,0,2,1,4,5,6,7]
1484 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm15, %ymm15
1485 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
1486 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm14, %ymm15, %ymm12
1487 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1488 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm9[0],ymm11[0],ymm9[1],ymm11[1],ymm9[2],ymm11[2],ymm9[3],ymm11[3],ymm9[8],ymm11[8],ymm9[9],ymm11[9],ymm9[10],ymm11[10],ymm9[11],ymm11[11]
1489 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1490 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm0[0],ymm10[0],ymm0[1],ymm10[1],ymm0[2],ymm10[2],ymm0[3],ymm10[3],ymm0[8],ymm10[8],ymm0[9],ymm10[9],ymm0[10],ymm10[10],ymm0[11],ymm10[11]
1491 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, %ymm14
1492 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[1,0,2,2,5,4,6,6]
1493 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,2,3]
1494 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm15[0],ymm12[1],ymm15[2,3],ymm12[4],ymm15[5,6],ymm12[7]
1495 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm15 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
1496 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,2]
1497 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1],ymm15[2],ymm12[3,4],ymm15[5],ymm12[6,7]
1498 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm15 = ymm8[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
1499 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,2]
1500 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm12, %ymm15, %ymm15
1501 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
1502 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm6
1503 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm0
1504 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
1505 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm6 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1506 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm7 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1507 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1508 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
1509 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm6, %ymm6
1510 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm6[2],ymm0[3,4],ymm6[5],ymm0[6,7]
1511 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[2,1,3,3,4,5,6,7]
1512 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
1513 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0],ymm0[1,2],ymm6[3],ymm0[4,5],ymm6[6],ymm0[7]
1514 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm6 = xmm3[0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
1515 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
1516 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
1517 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm6, %ymm0
1518 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
1519 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm11, %ymm5
1520 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm9, %ymm3
1521 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[8],ymm5[8],ymm3[9],ymm5[9],ymm3[10],ymm5[10],ymm3[11],ymm5[11]
1522 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm4 = ymm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm10[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1523 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm2 = ymm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm14[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1524 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11]
1525 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
1526 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
1527 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7]
1528 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
1529 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1530 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7]
1531 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
1532 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1533 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm1, %ymm2, %ymm1
1534 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1535 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 128(%rax)
1536 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 96(%rax)
1537 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1538 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 160(%rax)
1539 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1540 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 64(%rax)
1541 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 32(%rax)
1542 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1543 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
1544 ; AVX2-FAST-PERLANE-NEXT: addq $24, %rsp
1545 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1546 ; AVX2-FAST-PERLANE-NEXT: retq
1547 ;
1548 ; AVX512F-SLOW-LABEL: store_i16_stride6_vf16:
1549 ; AVX512F-SLOW: # %bb.0:
1550 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1551 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm0
1552 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm2
1553 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm4
1554 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm5
1555 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm1
1556 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm3
1557 ; AVX512F-SLOW-NEXT: vpsrldq {{.*#+}} ymm6 = ymm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm5[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1558 ; AVX512F-SLOW-NEXT: vpsrldq {{.*#+}} ymm7 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1559 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11]
1560 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm2[2,1,2,3,6,5,6,7]
1561 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm7[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1562 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm0[2,1,2,3,6,5,6,7]
1563 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm8[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1564 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
1565 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,5,12,u,4,13,u,7>
1566 ; AVX512F-SLOW-NEXT: vpermi2d %ymm6, %ymm7, %ymm8
1567 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm3[1,2,2,3,5,6,6,7]
1568 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[1,2,2,3,5,6,6,7]
1569 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11]
1570 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
1571 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0],ymm8[1,2],ymm6[3],ymm8[4,5],ymm6[6],ymm8[7]
1572 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm7 = ymm0[4],ymm2[4],ymm0[5],ymm2[5],ymm0[6],ymm2[6],ymm0[7],ymm2[7],ymm0[12],ymm2[12],ymm0[13],ymm2[13],ymm0[14],ymm2[14],ymm0[15],ymm2[15]
1573 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
1574 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <5,u,14,6,u,15,7,u>
1575 ; AVX512F-SLOW-NEXT: vpermi2d %ymm7, %ymm8, %ymm9
1576 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm7
1577 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [8,21,10,11,22,13,14,23]
1578 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm9 = ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[12],ymm3[12],ymm1[13],ymm3[13],ymm1[14],ymm3[14],ymm1[15],ymm3[15]
1579 ; AVX512F-SLOW-NEXT: vpermi2d %zmm9, %zmm7, %zmm8
1580 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm6, %zmm16
1581 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm6
1582 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm7
1583 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1584 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm8
1585 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm10
1586 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
1587 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <0,8,u,1,9,u,2,10>
1588 ; AVX512F-SLOW-NEXT: vpermi2d %ymm9, %ymm11, %ymm13
1589 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm9
1590 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm11
1591 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
1592 ; AVX512F-SLOW-NEXT: vpbroadcastq %xmm14, %ymm14
1593 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1],ymm14[2],ymm13[3,4],ymm14[5],ymm13[6,7]
1594 ; AVX512F-SLOW-NEXT: vpsrldq {{.*#+}} xmm14 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1595 ; AVX512F-SLOW-NEXT: vpsrldq {{.*#+}} xmm15 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1596 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
1597 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm8[0,1,2,1]
1598 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,7,6,5]
1599 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm10[0,1,2,1]
1600 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,7,6,5]
1601 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm12[4],xmm15[4],xmm12[5],xmm15[5],xmm12[6],xmm15[6],xmm12[7],xmm15[7]
1602 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <u,1,8,u,0,9,u,3>
1603 ; AVX512F-SLOW-NEXT: vpermi2d %ymm14, %ymm12, %ymm15
1604 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm9[1,2,2,3]
1605 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm11[1,2,2,3]
1606 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
1607 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,2,1]
1608 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0],ymm15[1,2],ymm12[3],ymm15[4,5],ymm12[6],ymm15[7]
1609 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm13, %zmm12
1610 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
1611 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[8],ymm2[8],ymm0[9],ymm2[9],ymm0[10],ymm2[10],ymm0[11],ymm2[11]
1612 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <4,12,u,5,13,u,6,14>
1613 ; AVX512F-SLOW-NEXT: vpermi2d %ymm4, %ymm0, %ymm2
1614 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
1615 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
1616 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2],ymm2[3,4],ymm0[5],ymm2[6,7]
1617 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
1618 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
1619 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <1,u,10,2,u,11,3,u>
1620 ; AVX512F-SLOW-NEXT: vpermi2d %ymm1, %ymm2, %ymm3
1621 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
1622 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [0,9,2,3,10,5,6,11]
1623 ; AVX512F-SLOW-NEXT: vpermi2d %ymm1, %ymm3, %ymm2
1624 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
1625 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, 64(%rax)
1626 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm12, (%rax)
1627 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm16, 128(%rax)
1628 ; AVX512F-SLOW-NEXT: vzeroupper
1629 ; AVX512F-SLOW-NEXT: retq
1630 ;
1631 ; AVX512F-FAST-LABEL: store_i16_stride6_vf16:
1632 ; AVX512F-FAST: # %bb.0:
1633 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1634 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm2
1635 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm3
1636 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm4
1637 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm5
1638 ; AVX512F-FAST-NEXT: vmovdqa64 (%r8), %ymm16
1639 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm1
1640 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm6
1641 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm7
1642 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
1643 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm8
1644 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm10
1645 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3]
1646 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <0,8,u,1,9,u,2,10>
1647 ; AVX512F-FAST-NEXT: vpermi2d %ymm9, %ymm11, %ymm12
1648 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm9
1649 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm11
1650 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
1651 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,8,3,4,9,6,7]
1652 ; AVX512F-FAST-NEXT: vpermi2d %ymm13, %ymm12, %ymm14
1653 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
1654 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm8, %xmm13
1655 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm10, %xmm12
1656 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
1657 ; AVX512F-FAST-NEXT: vpsrldq {{.*#+}} xmm13 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1658 ; AVX512F-FAST-NEXT: vpsrldq {{.*#+}} xmm15 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1659 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3]
1660 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <u,1,8,u,0,9,u,3>
1661 ; AVX512F-FAST-NEXT: vpermi2d %ymm13, %ymm12, %ymm15
1662 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm12
1663 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [16,9,10,17,12,13,18,15]
1664 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm15 = xmm9[1,2,2,3]
1665 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[1,2,2,3]
1666 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
1667 ; AVX512F-FAST-NEXT: vpermi2d %zmm0, %zmm12, %zmm13
1668 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm14, %zmm0
1669 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
1670 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
1671 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <1,u,10,2,u,11,3,u>
1672 ; AVX512F-FAST-NEXT: vpermi2d %ymm8, %ymm6, %ymm7
1673 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
1674 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,9,2,3,10,5,6,11]
1675 ; AVX512F-FAST-NEXT: vpermi2d %ymm6, %ymm7, %ymm8
1676 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
1677 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11]
1678 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <4,12,u,5,13,u,6,14>
1679 ; AVX512F-FAST-NEXT: vpermi2d %ymm6, %ymm7, %ymm9
1680 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm6
1681 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [8,9,20,11,12,21,14,15]
1682 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm16, %ymm10
1683 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm10[0],ymm1[0],ymm10[1],ymm1[1],ymm10[2],ymm1[2],ymm10[3],ymm1[3],ymm10[8],ymm1[8],ymm10[9],ymm1[9],ymm10[10],ymm1[10],ymm10[11],ymm1[11]
1684 ; AVX512F-FAST-NEXT: vpermi2d %zmm9, %zmm6, %zmm7
1685 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm6
1686 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
1687 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm3, %ymm8
1688 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm7
1689 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
1690 ; AVX512F-FAST-NEXT: vpsrldq {{.*#+}} ymm8 = ymm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm5[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1691 ; AVX512F-FAST-NEXT: vpsrldq {{.*#+}} ymm9 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
1692 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[8],ymm8[8],ymm9[9],ymm8[9],ymm9[10],ymm8[10],ymm9[11],ymm8[11]
1693 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <u,5,12,u,4,13,u,7>
1694 ; AVX512F-FAST-NEXT: vpermi2d %ymm8, %ymm7, %ymm9
1695 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[1,2,2,3,5,6,6,7]
1696 ; AVX512F-FAST-NEXT: vpshufd {{.*#+}} ymm8 = ymm16[1,2,2,3,5,6,6,7]
1697 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
1698 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [12,1,2,13,4,5,14,7]
1699 ; AVX512F-FAST-NEXT: vpermi2d %ymm7, %ymm9, %ymm8
1700 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15]
1701 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
1702 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <5,u,14,6,u,15,7,u>
1703 ; AVX512F-FAST-NEXT: vpermi2d %ymm2, %ymm3, %ymm4
1704 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm2
1705 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [8,21,10,11,22,13,14,23]
1706 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm10[4],ymm1[4],ymm10[5],ymm1[5],ymm10[6],ymm1[6],ymm10[7],ymm1[7],ymm10[12],ymm1[12],ymm10[13],ymm1[13],ymm10[14],ymm1[14],ymm10[15],ymm1[15]
1707 ; AVX512F-FAST-NEXT: vpermi2d %zmm1, %zmm2, %zmm3
1708 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm8, %zmm1
1709 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
1710 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, 64(%rax)
1711 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
1712 ; AVX512F-FAST-NEXT: vzeroupper
1713 ; AVX512F-FAST-NEXT: retq
1714 ;
1715 ; AVX512BW-LABEL: store_i16_stride6_vf16:
1716 ; AVX512BW: # %bb.0:
1717 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1718 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
1719 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
1720 ; AVX512BW-NEXT: vmovdqa (%r8), %ymm2
1721 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
1722 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
1723 ; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
1724 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <0,16,32,48,u,u,1,17,33,49,u,u,2,18,34,50,u,u,3,19,35,51,u,u,4,20,36,52,u,u,5,21>
1725 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1726 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,2,3,32,48,6,7,8,9,33,49,12,13,14,15,34,50,18,19,20,21,35,51,24,25,26,27,36,52,30,31]
1727 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm3, %zmm4
1728 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <5,21,u,u,38,54,6,22,u,u,39,55,7,23,u,u,40,56,8,24,u,u,41,57,9,25,u,u,42,58,10,26>
1729 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm3
1730 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,37,53,4,5,6,7,38,54,10,11,12,13,39,55,16,17,18,19,40,56,22,23,24,25,41,57,28,29,30,31]
1731 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm3, %zmm5
1732 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <u,u,11,27,43,59,u,u,12,28,44,60,u,u,13,29,45,61,u,u,14,30,46,62,u,u,15,31,47,63,u,u>
1733 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1734 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [42,58,2,3,4,5,43,59,8,9,10,11,44,60,14,15,16,17,45,61,20,21,22,23,46,62,26,27,28,29,47,63]
1735 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm3, %zmm0
1736 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%rax)
1737 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 64(%rax)
1738 ; AVX512BW-NEXT: vmovdqa64 %zmm4, (%rax)
1739 ; AVX512BW-NEXT: vzeroupper
1740 ; AVX512BW-NEXT: retq
1741 %in.vec0 = load <16 x i16>, ptr %in.vecptr0, align 64
1742 %in.vec1 = load <16 x i16>, ptr %in.vecptr1, align 64
1743 %in.vec2 = load <16 x i16>, ptr %in.vecptr2, align 64
1744 %in.vec3 = load <16 x i16>, ptr %in.vecptr3, align 64
1745 %in.vec4 = load <16 x i16>, ptr %in.vecptr4, align 64
1746 %in.vec5 = load <16 x i16>, ptr %in.vecptr5, align 64
1747 %1 = shufflevector <16 x i16> %in.vec0, <16 x i16> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1748 %2 = shufflevector <16 x i16> %in.vec2, <16 x i16> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1749 %3 = shufflevector <16 x i16> %in.vec4, <16 x i16> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1750 %4 = shufflevector <32 x i16> %1, <32 x i16> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1751 %5 = shufflevector <32 x i16> %3, <32 x i16> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1752 %6 = shufflevector <64 x i16> %4, <64 x i16> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
1753 %interleaved.vec = shufflevector <96 x i16> %6, <96 x i16> poison, <96 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95>
1754 store <96 x i16> %interleaved.vec, ptr %out.vec, align 64
1755 ret void
1756 }
1758 define void @store_i16_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
1759 ; SSE-LABEL: store_i16_stride6_vf32:
1760 ; SSE: # %bb.0:
1761 ; SSE-NEXT: subq $312, %rsp # imm = 0x138
1762 ; SSE-NEXT: movdqa (%rdi), %xmm2
1763 ; SSE-NEXT: movdqa 16(%rdi), %xmm13
1764 ; SSE-NEXT: movdqa (%rsi), %xmm3
1765 ; SSE-NEXT: movdqa 16(%rsi), %xmm1
1766 ; SSE-NEXT: movdqa (%rdx), %xmm5
1767 ; SSE-NEXT: movdqa 16(%rdx), %xmm14
1768 ; SSE-NEXT: movdqa (%rcx), %xmm4
1769 ; SSE-NEXT: movdqa 16(%rcx), %xmm10
1770 ; SSE-NEXT: movdqa (%r8), %xmm8
1771 ; SSE-NEXT: movdqa (%r9), %xmm11
1772 ; SSE-NEXT: movdqa %xmm5, %xmm0
1773 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
1774 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1775 ; SSE-NEXT: movdqa %xmm2, %xmm9
1776 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm3[0],xmm9[1],xmm3[1],xmm9[2],xmm3[2],xmm9[3],xmm3[3]
1777 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1778 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,3],xmm0[3,3]
1779 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm8[2,1,3,3,4,5,6,7]
1780 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[1,2],xmm7[0,1]
1781 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,0,1,3]
1782 ; SSE-NEXT: movaps {{.*#+}} xmm6 = [65535,0,65535,65535,65535,65535,65535,0]
1783 ; SSE-NEXT: andps %xmm6, %xmm9
1784 ; SSE-NEXT: movdqa %xmm11, %xmm7
1785 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1786 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm11[0,2,2,3,4,5,6,7]
1787 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,1,2,1]
1788 ; SSE-NEXT: movaps %xmm6, %xmm0
1789 ; SSE-NEXT: andnps %xmm11, %xmm0
1790 ; SSE-NEXT: orps %xmm9, %xmm0
1791 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1792 ; SSE-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
1793 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1794 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
1795 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1796 ; SSE-NEXT: movdqa %xmm2, %xmm3
1797 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,3],xmm5[3,3]
1798 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm8[0,1,2,3,6,5,7,7]
1799 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,2],xmm4[2,3]
1800 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,0,1,3]
1801 ; SSE-NEXT: andps %xmm6, %xmm3
1802 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm7[0,1,2,3,4,6,6,7]
1803 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,2,3]
1804 ; SSE-NEXT: movaps %xmm6, %xmm0
1805 ; SSE-NEXT: andnps %xmm4, %xmm0
1806 ; SSE-NEXT: orps %xmm3, %xmm0
1807 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1808 ; SSE-NEXT: movdqa %xmm14, %xmm0
1809 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3]
1810 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1811 ; SSE-NEXT: movdqa %xmm13, %xmm11
1812 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm1[0],xmm11[1],xmm1[1],xmm11[2],xmm1[2],xmm11[3],xmm1[3]
1813 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1814 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm0[3,3]
1815 ; SSE-NEXT: movdqa 16(%r8), %xmm15
1816 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm15[2,1,3,3,4,5,6,7]
1817 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,2],xmm9[0,1]
1818 ; SSE-NEXT: movdqa 16(%r9), %xmm9
1819 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm9[0,2,2,3,4,5,6,7]
1820 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm12[0,1,2,1]
1821 ; SSE-NEXT: movaps %xmm6, %xmm0
1822 ; SSE-NEXT: andnps %xmm12, %xmm0
1823 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0,1,3]
1824 ; SSE-NEXT: andps %xmm6, %xmm11
1825 ; SSE-NEXT: orps %xmm11, %xmm0
1826 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1827 ; SSE-NEXT: punpckhwd {{.*#+}} xmm14 = xmm14[4],xmm10[4],xmm14[5],xmm10[5],xmm14[6],xmm10[6],xmm14[7],xmm10[7]
1828 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1829 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
1830 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1831 ; SSE-NEXT: movdqa %xmm13, %xmm1
1832 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm14[3,3]
1833 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm15[0,1,2,3,6,5,7,7]
1834 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm10[2,3]
1835 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm9[0,1,2,3,4,6,6,7]
1836 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,1,2,3]
1837 ; SSE-NEXT: movaps %xmm6, %xmm0
1838 ; SSE-NEXT: andnps %xmm10, %xmm0
1839 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
1840 ; SSE-NEXT: andps %xmm6, %xmm1
1841 ; SSE-NEXT: orps %xmm1, %xmm0
1842 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1843 ; SSE-NEXT: movdqa 32(%rdx), %xmm2
1844 ; SSE-NEXT: movdqa 32(%rcx), %xmm1
1845 ; SSE-NEXT: movdqa %xmm2, %xmm0
1846 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1847 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1848 ; SSE-NEXT: movdqa 32(%rdi), %xmm3
1849 ; SSE-NEXT: movdqa 32(%rsi), %xmm11
1850 ; SSE-NEXT: movdqa %xmm3, %xmm10
1851 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1852 ; SSE-NEXT: movdqa %xmm10, %xmm12
1853 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,3],xmm0[3,3]
1854 ; SSE-NEXT: movdqa 32(%r8), %xmm14
1855 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm14[2,1,3,3,4,5,6,7]
1856 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,2],xmm13[0,1]
1857 ; SSE-NEXT: movdqa 32(%r9), %xmm4
1858 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,2,2,3,4,5,6,7]
1859 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1860 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,2,1]
1861 ; SSE-NEXT: movaps %xmm6, %xmm13
1862 ; SSE-NEXT: andnps %xmm0, %xmm13
1863 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0,1,3]
1864 ; SSE-NEXT: andps %xmm6, %xmm12
1865 ; SSE-NEXT: orps %xmm12, %xmm13
1866 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1867 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
1868 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1869 ; SSE-NEXT: movdqa %xmm3, %xmm0
1870 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm11[4],xmm0[5],xmm11[5],xmm0[6],xmm11[6],xmm0[7],xmm11[7]
1871 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1872 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm2[3,3]
1873 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm14[0,1,2,3,6,5,7,7]
1874 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[2,3]
1875 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm4[0,1,2,3,4,6,6,7]
1876 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1877 ; SSE-NEXT: movaps %xmm6, %xmm11
1878 ; SSE-NEXT: andnps %xmm1, %xmm11
1879 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
1880 ; SSE-NEXT: andps %xmm6, %xmm0
1881 ; SSE-NEXT: orps %xmm0, %xmm11
1882 ; SSE-NEXT: movaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1883 ; SSE-NEXT: movdqa 48(%rdx), %xmm3
1884 ; SSE-NEXT: movdqa 48(%rcx), %xmm4
1885 ; SSE-NEXT: movdqa %xmm3, %xmm5
1886 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1887 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1888 ; SSE-NEXT: movdqa 48(%rdi), %xmm0
1889 ; SSE-NEXT: movdqa 48(%rsi), %xmm1
1890 ; SSE-NEXT: movdqa %xmm0, %xmm11
1891 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm1[0],xmm11[1],xmm1[1],xmm11[2],xmm1[2],xmm11[3],xmm1[3]
1892 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1893 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm5[3,3]
1894 ; SSE-NEXT: movdqa 48(%r8), %xmm12
1895 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm12[2,1,3,3,4,5,6,7]
1896 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,2],xmm13[0,1]
1897 ; SSE-NEXT: movdqa 48(%r9), %xmm2
1898 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm2[0,2,2,3,4,5,6,7]
1899 ; SSE-NEXT: movdqa %xmm2, (%rsp) # 16-byte Spill
1900 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm13[0,1,2,1]
1901 ; SSE-NEXT: movaps %xmm6, %xmm7
1902 ; SSE-NEXT: andnps %xmm13, %xmm7
1903 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0,1,3]
1904 ; SSE-NEXT: andps %xmm6, %xmm11
1905 ; SSE-NEXT: orps %xmm11, %xmm7
1906 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1907 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
1908 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1909 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1910 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1911 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm3[3,3]
1912 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm12[0,1,2,3,6,5,7,7]
1913 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[2,3]
1914 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
1915 ; SSE-NEXT: andps %xmm6, %xmm0
1916 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,6,6,7]
1917 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1918 ; SSE-NEXT: andnps %xmm1, %xmm6
1919 ; SSE-NEXT: orps %xmm0, %xmm6
1920 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1921 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1922 ; SSE-NEXT: movaps %xmm5, %xmm0
1923 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1924 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
1925 ; SSE-NEXT: movdqa %xmm8, %xmm1
1926 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm2[1,3]
1927 ; SSE-NEXT: movaps %xmm2, %xmm4
1928 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm1[0,2]
1929 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1930 ; SSE-NEXT: movdqa %xmm3, %xmm11
1931 ; SSE-NEXT: pslldq {{.*#+}} xmm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm11[0,1,2,3,4,5]
1932 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,65535,65535,0,65535,65535]
1933 ; SSE-NEXT: movdqa %xmm1, %xmm2
1934 ; SSE-NEXT: pandn %xmm11, %xmm2
1935 ; SSE-NEXT: andps %xmm1, %xmm0
1936 ; SSE-NEXT: por %xmm0, %xmm2
1937 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1938 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm5[1]
1939 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm8[1,1,1,1,4,5,6,7]
1940 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,1],xmm5[1,1]
1941 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm4[0,2]
1942 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,0,65535,65535,65535,65535]
1943 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm3[0,0,1,1]
1944 ; SSE-NEXT: movdqa %xmm5, %xmm0
1945 ; SSE-NEXT: pandn %xmm6, %xmm0
1946 ; SSE-NEXT: andps %xmm5, %xmm11
1947 ; SSE-NEXT: por %xmm11, %xmm0
1948 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1949 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1950 ; SSE-NEXT: movaps %xmm7, %xmm6
1951 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1952 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
1953 ; SSE-NEXT: movdqa %xmm8, %xmm11
1954 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,1],xmm0[1,3]
1955 ; SSE-NEXT: movaps %xmm0, %xmm4
1956 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm11[0,2]
1957 ; SSE-NEXT: movdqa %xmm3, %xmm0
1958 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm3[2,2,3,3]
1959 ; SSE-NEXT: pslld $16, %xmm0
1960 ; SSE-NEXT: movdqa %xmm1, %xmm2
1961 ; SSE-NEXT: pandn %xmm0, %xmm2
1962 ; SSE-NEXT: andps %xmm1, %xmm6
1963 ; SSE-NEXT: por %xmm6, %xmm2
1964 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1965 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm7[1]
1966 ; SSE-NEXT: psrldq {{.*#+}} xmm8 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
1967 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm7[1,1]
1968 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm4[0,2]
1969 ; SSE-NEXT: movdqa %xmm5, %xmm0
1970 ; SSE-NEXT: pandn %xmm11, %xmm0
1971 ; SSE-NEXT: andps %xmm5, %xmm8
1972 ; SSE-NEXT: por %xmm8, %xmm0
1973 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1974 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1975 ; SSE-NEXT: movaps %xmm3, %xmm6
1976 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1977 ; SSE-NEXT: movlhps {{.*#+}} xmm6 = xmm6[0],xmm0[0]
1978 ; SSE-NEXT: movdqa %xmm15, %xmm2
1979 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm0[1,3]
1980 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm2[0,2]
1981 ; SSE-NEXT: movdqa %xmm9, %xmm8
1982 ; SSE-NEXT: pslldq {{.*#+}} xmm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm8[0,1,2,3,4,5]
1983 ; SSE-NEXT: movdqa %xmm1, %xmm2
1984 ; SSE-NEXT: pandn %xmm8, %xmm2
1985 ; SSE-NEXT: andps %xmm1, %xmm6
1986 ; SSE-NEXT: por %xmm6, %xmm2
1987 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1988 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
1989 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm15[1,1,1,1,4,5,6,7]
1990 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[1,1],xmm3[1,1]
1991 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm0[0,2]
1992 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm9[0,0,1,1]
1993 ; SSE-NEXT: movdqa %xmm5, %xmm0
1994 ; SSE-NEXT: pandn %xmm8, %xmm0
1995 ; SSE-NEXT: andps %xmm5, %xmm6
1996 ; SSE-NEXT: por %xmm6, %xmm0
1997 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1998 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1999 ; SSE-NEXT: movaps %xmm2, %xmm8
2000 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2001 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm0[0]
2002 ; SSE-NEXT: movdqa %xmm15, %xmm6
2003 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,1],xmm0[1,3]
2004 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm6[0,2]
2005 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm9[2,2,3,3]
2006 ; SSE-NEXT: pslld $16, %xmm9
2007 ; SSE-NEXT: movdqa %xmm1, %xmm7
2008 ; SSE-NEXT: pandn %xmm9, %xmm7
2009 ; SSE-NEXT: andps %xmm1, %xmm8
2010 ; SSE-NEXT: por %xmm8, %xmm7
2011 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
2012 ; SSE-NEXT: psrldq {{.*#+}} xmm15 = xmm15[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2013 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[1,1],xmm2[1,1]
2014 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,0],xmm0[0,2]
2015 ; SSE-NEXT: movdqa %xmm5, %xmm8
2016 ; SSE-NEXT: pandn %xmm11, %xmm8
2017 ; SSE-NEXT: andps %xmm5, %xmm15
2018 ; SSE-NEXT: por %xmm15, %xmm8
2019 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2020 ; SSE-NEXT: movdqa %xmm2, %xmm9
2021 ; SSE-NEXT: punpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm10[0]
2022 ; SSE-NEXT: movdqa %xmm14, %xmm3
2023 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm10[1,3]
2024 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,0],xmm3[0,2]
2025 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2026 ; SSE-NEXT: movdqa %xmm0, %xmm11
2027 ; SSE-NEXT: pslldq {{.*#+}} xmm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm11[0,1,2,3,4,5]
2028 ; SSE-NEXT: movdqa %xmm1, %xmm6
2029 ; SSE-NEXT: pandn %xmm11, %xmm6
2030 ; SSE-NEXT: andps %xmm1, %xmm9
2031 ; SSE-NEXT: por %xmm9, %xmm6
2032 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm10 = xmm10[1],xmm2[1]
2033 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm14[1,1,1,1,4,5,6,7]
2034 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,1],xmm2[1,1]
2035 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm10[0,2]
2036 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm0[0,0,1,1]
2037 ; SSE-NEXT: movdqa %xmm5, %xmm9
2038 ; SSE-NEXT: pandn %xmm13, %xmm9
2039 ; SSE-NEXT: andps %xmm5, %xmm11
2040 ; SSE-NEXT: por %xmm11, %xmm9
2041 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2042 ; SSE-NEXT: movaps %xmm3, %xmm11
2043 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2044 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm2[0]
2045 ; SSE-NEXT: movdqa %xmm14, %xmm13
2046 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,1],xmm2[1,3]
2047 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm13[0,2]
2048 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm0[2,2,3,3]
2049 ; SSE-NEXT: pslld $16, %xmm0
2050 ; SSE-NEXT: movdqa %xmm1, %xmm15
2051 ; SSE-NEXT: pandn %xmm0, %xmm15
2052 ; SSE-NEXT: andps %xmm1, %xmm11
2053 ; SSE-NEXT: por %xmm11, %xmm15
2054 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
2055 ; SSE-NEXT: psrldq {{.*#+}} xmm14 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2056 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[1,1],xmm3[1,1]
2057 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,0],xmm2[0,2]
2058 ; SSE-NEXT: movdqa %xmm5, %xmm10
2059 ; SSE-NEXT: pandn %xmm13, %xmm10
2060 ; SSE-NEXT: andps %xmm5, %xmm14
2061 ; SSE-NEXT: por %xmm14, %xmm10
2062 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2063 ; SSE-NEXT: movaps %xmm2, %xmm11
2064 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2065 ; SSE-NEXT: movlhps {{.*#+}} xmm11 = xmm11[0],xmm0[0]
2066 ; SSE-NEXT: movdqa %xmm12, %xmm13
2067 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm0[1,3]
2068 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm13[0,2]
2069 ; SSE-NEXT: movdqa (%rsp), %xmm4 # 16-byte Reload
2070 ; SSE-NEXT: movdqa %xmm4, %xmm14
2071 ; SSE-NEXT: pslldq {{.*#+}} xmm14 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm14[0,1,2,3,4,5]
2072 ; SSE-NEXT: movdqa %xmm1, %xmm13
2073 ; SSE-NEXT: pandn %xmm14, %xmm13
2074 ; SSE-NEXT: andps %xmm1, %xmm11
2075 ; SSE-NEXT: por %xmm11, %xmm13
2076 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm2[1]
2077 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm12[1,1,1,1,4,5,6,7]
2078 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[1,1],xmm2[1,1]
2079 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,0],xmm0[0,2]
2080 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,0,1,1]
2081 ; SSE-NEXT: movdqa %xmm5, %xmm11
2082 ; SSE-NEXT: pandn %xmm2, %xmm11
2083 ; SSE-NEXT: andps %xmm5, %xmm14
2084 ; SSE-NEXT: por %xmm14, %xmm11
2085 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2086 ; SSE-NEXT: movaps %xmm3, %xmm2
2087 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2088 ; SSE-NEXT: movlhps {{.*#+}} xmm2 = xmm2[0],xmm0[0]
2089 ; SSE-NEXT: movdqa %xmm12, %xmm14
2090 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,1],xmm0[1,3]
2091 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,0],xmm14[0,2]
2092 ; SSE-NEXT: andps %xmm1, %xmm2
2093 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm4[2,2,3,3]
2094 ; SSE-NEXT: pslld $16, %xmm4
2095 ; SSE-NEXT: pandn %xmm4, %xmm1
2096 ; SSE-NEXT: por %xmm2, %xmm1
2097 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm3[1]
2098 ; SSE-NEXT: psrldq {{.*#+}} xmm12 = xmm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2099 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,1],xmm3[1,1]
2100 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm0[0,2]
2101 ; SSE-NEXT: andps %xmm5, %xmm12
2102 ; SSE-NEXT: pandn %xmm14, %xmm5
2103 ; SSE-NEXT: por %xmm12, %xmm5
2104 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2105 ; SSE-NEXT: movdqa %xmm5, 352(%rax)
2106 ; SSE-NEXT: movdqa %xmm1, 336(%rax)
2107 ; SSE-NEXT: movdqa %xmm11, 304(%rax)
2108 ; SSE-NEXT: movdqa %xmm13, 288(%rax)
2109 ; SSE-NEXT: movdqa %xmm10, 256(%rax)
2110 ; SSE-NEXT: movdqa %xmm15, 240(%rax)
2111 ; SSE-NEXT: movdqa %xmm9, 208(%rax)
2112 ; SSE-NEXT: movdqa %xmm6, 192(%rax)
2113 ; SSE-NEXT: movdqa %xmm8, 160(%rax)
2114 ; SSE-NEXT: movdqa %xmm7, 144(%rax)
2115 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2116 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2117 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2118 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2119 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2120 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2121 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2122 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2123 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2124 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2125 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2126 ; SSE-NEXT: movaps %xmm0, (%rax)
2127 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2128 ; SSE-NEXT: movaps %xmm0, 368(%rax)
2129 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2130 ; SSE-NEXT: movaps %xmm0, 320(%rax)
2131 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2132 ; SSE-NEXT: movaps %xmm0, 272(%rax)
2133 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2134 ; SSE-NEXT: movaps %xmm0, 224(%rax)
2135 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2136 ; SSE-NEXT: movaps %xmm0, 176(%rax)
2137 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2138 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2139 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2140 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2141 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2142 ; SSE-NEXT: movaps %xmm0, 32(%rax)
2143 ; SSE-NEXT: addq $312, %rsp # imm = 0x138
2144 ; SSE-NEXT: retq
2145 ;
2146 ; AVX1-ONLY-LABEL: store_i16_stride6_vf32:
2147 ; AVX1-ONLY: # %bb.0:
2148 ; AVX1-ONLY-NEXT: subq $120, %rsp
2149 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm8
2150 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm0
2151 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm9
2152 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm1
2153 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2154 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,2,3,3]
2155 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2156 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm3[0,0,1,1]
2157 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm1
2158 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm10
2159 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm2
2160 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm11
2161 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm4
2162 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
2163 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm0[2,3,2,3]
2164 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
2165 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,1,0,1]
2166 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm2
2167 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
2168 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm2
2169 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm1
2170 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm2[0,1,2,3],xmm1[4,5],xmm2[6,7]
2171 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm2
2172 ; AVX1-ONLY-NEXT: vpslld $16, %xmm2, %xmm12
2173 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4],xmm12[5],xmm7[6,7]
2174 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2175 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm1[2,1,3,3,4,5,6,7]
2176 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
2177 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm7[0],xmm5[1,2],xmm7[3]
2178 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm2[0,2,2,3,4,5,6,7]
2179 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
2180 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm7[1],xmm5[2,3,4,5,6],xmm7[7]
2181 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2182 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[2,3,2,3]
2183 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
2184 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[1,1,2,2]
2185 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2186 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
2187 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
2188 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm4
2189 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm1[0,1,2,3,6,5,7,7]
2190 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
2191 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
2192 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,6,6,7]
2193 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
2194 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1],xmm4[2,3,4,5,6],xmm5[7]
2195 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2196 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm4 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2197 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0],xmm4[1],xmm3[2,3]
2198 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,2,3,3]
2199 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm4[3],xmm3[4,5,6,7]
2200 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2201 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2202 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
2203 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[1,1,2,2]
2204 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,2,3,3]
2205 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
2206 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm3[2,3,2,3]
2207 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm5
2208 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
2209 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm4
2210 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,6,5,7,7]
2211 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
2212 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm13
2213 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm5[0,1],xmm13[2,3,4,5],xmm5[6,7]
2214 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm5
2215 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm14 = xmm5[0,1,2,3,4,6,6,7]
2216 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,1,2,3]
2217 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5,6],xmm14[7]
2218 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2219 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm13 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2220 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0],xmm13[1],xmm12[2,3]
2221 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm5[2,2,3,3]
2222 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm13[3],xmm12[4,5,6,7]
2223 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2224 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm6[0,0,1,1]
2225 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[1,1,2,2]
2226 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm12, %ymm6
2227 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,1,0,1]
2228 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm0
2229 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3],ymm6[4],ymm0[5,6],ymm6[7]
2230 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm6
2231 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm12 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
2232 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm12[2,3],xmm6[4,5,6,7]
2233 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm2[0,0,1,1]
2234 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm12[3],xmm6[4,5,6,7]
2235 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, (%rsp) # 16-byte Spill
2236 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,1],xmm1[0],xmm0[3]
2237 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
2238 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
2239 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2240 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
2241 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
2242 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
2243 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
2244 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
2245 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[0,1,0,1]
2246 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm6
2247 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm6[0],ymm1[1],ymm6[2,3],ymm1[4],ymm6[5,6],ymm1[7]
2248 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm6
2249 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm8 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
2250 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2,3],xmm6[4,5,6,7]
2251 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm5[0,0,1,1]
2252 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm8[3],xmm6[4,5,6,7]
2253 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2254 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm1 = xmm1[0,1],xmm4[0],xmm1[3]
2255 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm5[0,1,2,3,4,5]
2256 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm6[5],xmm1[6,7]
2257 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2258 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
2259 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[0,0,1,1]
2260 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2261 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm1
2262 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,3,2,3]
2263 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2264 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2265 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2],ymm2[3,4],ymm0[5],ymm2[6,7]
2266 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm2
2267 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3],xmm4[4,5],xmm2[6,7]
2268 ; AVX1-ONLY-NEXT: vpslld $16, %xmm5, %xmm3
2269 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm3[5],xmm2[6,7]
2270 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2271 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm2
2272 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[2,1,3,3,4,5,6,7]
2273 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
2274 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm3[0],xmm0[1,2],xmm3[3]
2275 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm5[0,2,2,3,4,5,6,7]
2276 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
2277 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm3[1],xmm0[2,3,4,5,6],xmm3[7]
2278 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2279 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2280 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[2,2,3,3]
2281 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2282 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2283 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2284 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm2
2285 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm3
2286 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2287 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2288 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[2,3,2,3]
2289 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[0,1,0,1]
2290 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2291 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1],ymm0[2],ymm2[3,4],ymm0[5],ymm2[6,7]
2292 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
2293 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm2
2294 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm2[0,1,2,3],xmm3[4,5],xmm2[6,7]
2295 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm2
2296 ; AVX1-ONLY-NEXT: vpslld $16, %xmm2, %xmm6
2297 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4],xmm6[5],xmm5[6,7]
2298 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2299 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[2,1,3,3,4,5,6,7]
2300 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,2,1]
2301 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm5[0],xmm0[1,2],xmm5[3]
2302 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm2[0,2,2,3,4,5,6,7]
2303 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,2,1]
2304 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm5[1],xmm0[2,3,4,5,6],xmm5[7]
2305 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2306 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[2,3,2,3]
2307 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
2308 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm1[1,1,2,2]
2309 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
2310 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
2311 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
2312 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,6,5,7,7]
2313 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
2314 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm4
2315 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm4[2,3,4,5],xmm1[6,7]
2316 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm2[0,1,2,3,4,6,6,7]
2317 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,1,2,3]
2318 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm4[1],xmm1[2,3,4,5,6],xmm4[7]
2319 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2320 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2321 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3]
2322 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
2323 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm15 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
2324 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm9
2325 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm8
2326 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
2327 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
2328 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
2329 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2330 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm7
2331 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
2332 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
2333 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,3,2,3]
2334 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
2335 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2336 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
2337 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,6,5,7,7]
2338 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
2339 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm13
2340 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm0[0,1],xmm13[2,3,4,5],xmm0[6,7]
2341 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
2342 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm14 = xmm0[0,1,2,3,4,6,6,7]
2343 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[2,1,2,3]
2344 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5,6],xmm14[7]
2345 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm14 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2346 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm12 = xmm12[0],xmm14[1],xmm12[2,3]
2347 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm0[2,2,3,3]
2348 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm14[3],xmm12[4,5,6,7]
2349 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm11[0,0,1,1]
2350 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
2351 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm14, %ymm11
2352 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm10[0,1,0,1]
2353 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm14, %ymm10
2354 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm10[0],ymm11[1],ymm10[2,3],ymm11[4],ymm10[5,6],ymm11[7]
2355 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm10
2356 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm14 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
2357 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1],xmm14[2,3],xmm10[4,5,6,7]
2358 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm2[0,0,1,1]
2359 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm14[3],xmm10[4,5,6,7]
2360 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm3 = xmm11[0,1],xmm3[0],xmm11[3]
2361 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
2362 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2,3,4],xmm2[5],xmm3[6,7]
2363 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2364 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm3[0,0,1,1]
2365 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm3[1,1,2,2]
2366 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
2367 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2368 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,1,0,1]
2369 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm7
2370 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0],ymm8[1],ymm7[2,3],ymm8[4],ymm7[5,6],ymm8[7]
2371 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm8
2372 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm9 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
2373 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6,7]
2374 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
2375 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3],xmm8[4,5,6,7]
2376 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm7 = xmm7[0,1],xmm1[0],xmm7[3]
2377 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5]
2378 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4],xmm9[5],xmm7[6,7]
2379 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2380 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
2381 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
2382 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[2,3,2,3]
2383 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,0,1]
2384 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
2385 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
2386 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm4
2387 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3],xmm1[4,5],xmm4[6,7]
2388 ; AVX1-ONLY-NEXT: vpslld $16, %xmm0, %xmm5
2389 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4],xmm5[5],xmm4[6,7]
2390 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,3,3,4,5,6,7]
2391 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
2392 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm3[1,2],xmm1[3]
2393 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2394 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,1]
2395 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2,3,4,5,6],xmm0[7]
2396 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2397 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 32(%rax)
2398 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, 48(%rax)
2399 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, (%rax)
2400 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, 16(%rax)
2401 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 96(%rax)
2402 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, 112(%rax)
2403 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, 64(%rax)
2404 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, 80(%rax)
2405 ; AVX1-ONLY-NEXT: vmovdqa %xmm15, 160(%rax)
2406 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2407 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 176(%rax)
2408 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2409 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 128(%rax)
2410 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2411 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 144(%rax)
2412 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2413 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 224(%rax)
2414 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2415 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 240(%rax)
2416 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2417 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 192(%rax)
2418 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2419 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
2420 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2421 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 288(%rax)
2422 ; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
2423 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 304(%rax)
2424 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2425 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 256(%rax)
2426 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2427 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 272(%rax)
2428 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2429 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 352(%rax)
2430 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2431 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 368(%rax)
2432 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2433 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 320(%rax)
2434 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2435 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 336(%rax)
2436 ; AVX1-ONLY-NEXT: addq $120, %rsp
2437 ; AVX1-ONLY-NEXT: vzeroupper
2438 ; AVX1-ONLY-NEXT: retq
2439 ;
2440 ; AVX2-SLOW-LABEL: store_i16_stride6_vf32:
2441 ; AVX2-SLOW: # %bb.0:
2442 ; AVX2-SLOW-NEXT: subq $616, %rsp # imm = 0x268
2443 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm13
2444 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm9
2445 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2446 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm1
2447 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2448 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm11
2449 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2450 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2451 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm0, %ymm0
2452 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm15
2453 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm5
2454 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm15[0,1,2,1]
2455 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
2456 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm2
2457 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
2458 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm6
2459 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
2460 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
2461 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2462 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2463 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
2464 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm1
2465 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2466 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm7
2467 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,3,3,4,5,6,7]
2468 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2469 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7]
2470 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm0
2471 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2472 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2473 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
2474 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,2,1]
2475 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
2476 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
2477 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2478 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2479 ; AVX2-SLOW-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2480 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2481 ; AVX2-SLOW-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2482 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2483 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[0,1,2,1]
2484 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2485 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
2486 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm6[0,1,2,1]
2487 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, %xmm14
2488 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2489 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
2490 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2491 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm4
2492 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
2493 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
2494 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
2495 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm7[2,1,3,3,4,5,6,7]
2496 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2497 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2498 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
2499 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[0,2,2,3,4,5,6,7]
2500 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2501 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
2502 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2503 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
2504 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2505 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm2
2506 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2507 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm1
2508 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2509 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2510 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2511 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
2512 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm10
2513 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[2,1,2,3,6,5,6,7]
2514 ; AVX2-SLOW-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2515 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
2516 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm8
2517 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm8[2,1,2,3,6,5,6,7]
2518 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2519 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
2520 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
2521 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
2522 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
2523 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
2524 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm2
2525 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2526 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
2527 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2528 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
2529 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm2
2530 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2531 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
2532 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
2533 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2534 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
2535 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2536 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm2
2537 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2538 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm1
2539 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2540 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2541 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2542 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
2543 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm2
2544 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2545 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,2,3,6,5,6,7]
2546 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
2547 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm12
2548 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm12[2,1,2,3,6,5,6,7]
2549 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
2550 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
2551 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
2552 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
2553 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
2554 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm2
2555 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2556 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
2557 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2558 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
2559 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm2
2560 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2561 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
2562 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm6 = ymm6[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
2563 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
2564 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm6, %ymm0
2565 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2566 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm5[4],xmm14[5],xmm5[5],xmm14[6],xmm5[6],xmm14[7],xmm5[7]
2567 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
2568 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
2569 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,2,3,3]
2570 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2571 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
2572 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
2573 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm6
2574 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
2575 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm0[0],ymm6[1],ymm0[2,3],ymm6[4],ymm0[5,6],ymm6[7]
2576 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[2,3,2,3]
2577 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,1,4,5,6,7]
2578 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm0[0,1,0,1]
2579 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
2580 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm6, %ymm14, %ymm0
2581 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm3
2582 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2583 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
2584 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
2585 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[1,1,1,1]
2586 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
2587 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
2588 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[1,2,3,3]
2589 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
2590 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1],ymm6[2],ymm14[3,4],ymm6[5],ymm14[6,7]
2591 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2592 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm1
2593 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2594 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm6[0],ymm1[1],ymm6[2,3],ymm1[4],ymm6[5,6],ymm1[7]
2595 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2596 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm7[2,3,2,3]
2597 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,1,4,5,6,7]
2598 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
2599 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm1, %ymm6, %ymm1
2600 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2601 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm8[4],ymm10[4],ymm8[5],ymm10[5],ymm8[6],ymm10[6],ymm8[7],ymm10[7],ymm8[12],ymm10[12],ymm8[13],ymm10[13],ymm8[14],ymm10[14],ymm8[15],ymm10[15]
2602 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
2603 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2604 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm6 # 32-byte Folded Reload
2605 ; AVX2-SLOW-NEXT: # ymm6 = ymm2[4],mem[4],ymm2[5],mem[5],ymm2[6],mem[6],ymm2[7],mem[7],ymm2[12],mem[12],ymm2[13],mem[13],ymm2[14],mem[14],ymm2[15],mem[15]
2606 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[1,2,3,3,5,6,7,7]
2607 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
2608 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7]
2609 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
2610 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2611 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm2, %ymm14
2612 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
2613 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm14[1],ymm1[2,3],ymm14[4],ymm1[5,6],ymm14[7]
2614 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
2615 ; AVX2-SLOW-NEXT: # ymm14 = mem[2,3,2,3,6,7,6,7]
2616 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm14[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
2617 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
2618 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm14, %ymm1
2619 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2620 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, %ymm14
2621 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2622 ; AVX2-SLOW-NEXT: vmovdqa %ymm12, %ymm3
2623 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm4[4],ymm12[5],ymm4[5],ymm12[6],ymm4[6],ymm12[7],ymm4[7],ymm12[12],ymm4[12],ymm12[13],ymm4[13],ymm12[14],ymm4[14],ymm12[15],ymm4[15]
2624 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
2625 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
2626 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2627 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm12 = ymm2[4],ymm5[4],ymm2[5],ymm5[5],ymm2[6],ymm5[6],ymm2[7],ymm5[7],ymm2[12],ymm5[12],ymm2[13],ymm5[13],ymm2[14],ymm5[14],ymm2[15],ymm5[15]
2628 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[1,2,3,3,5,6,7,7]
2629 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
2630 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm12[0,1],ymm1[2],ymm12[3,4],ymm1[5],ymm12[6,7]
2631 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
2632 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm8, %ymm6
2633 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,2,3]
2634 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm6[1],ymm1[2,3],ymm6[4],ymm1[5,6],ymm6[7]
2635 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2636 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm10[2,3,2,3,6,7,6,7]
2637 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm6[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
2638 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,2,3]
2639 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm1, %ymm6, %ymm6
2640 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
2641 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm9[0],xmm13[0],xmm9[1],xmm13[1],xmm9[2],xmm13[2],xmm9[3],xmm13[3]
2642 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2643 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
2644 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2645 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
2646 ; AVX2-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm11[0],zero,xmm11[1],zero,xmm11[2],zero,xmm11[3],zero
2647 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
2648 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2649 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm7[0,0,2,1,4,5,6,7]
2650 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm0, %ymm12
2651 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
2652 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm1, %ymm12, %ymm12
2653 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2654 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
2655 ; AVX2-SLOW-NEXT: # xmm1 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
2656 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2657 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2658 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
2659 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2660 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,0,2,2]
2661 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
2662 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6],ymm0[7]
2663 ; AVX2-SLOW-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2664 ; AVX2-SLOW-NEXT: # xmm1 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
2665 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
2666 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2667 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2668 ; AVX2-SLOW-NEXT: # xmm1 = mem[0,0,2,1,4,5,6,7]
2669 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
2670 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm0, %ymm1, %ymm1
2671 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2672 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
2673 ; AVX2-SLOW-NEXT: # ymm0 = ymm0[0],mem[0],ymm0[1],mem[1],ymm0[2],mem[2],ymm0[3],mem[3],ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11]
2674 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
2675 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm13 # 32-byte Folded Reload
2676 ; AVX2-SLOW-NEXT: # ymm13 = ymm7[0],mem[0],ymm7[1],mem[1],ymm7[2],mem[2],ymm7[3],mem[3],ymm7[8],mem[8],ymm7[9],mem[9],ymm7[10],mem[10],ymm7[11],mem[11]
2677 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
2678 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm13[1,0,2,2,5,4,6,6]
2679 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,1,2,3]
2680 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm13[1],ymm0[2,3],ymm13[4],ymm0[5,6],ymm13[7]
2681 ; AVX2-SLOW-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
2682 ; AVX2-SLOW-NEXT: # ymm11 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
2683 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,2]
2684 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm11[2],ymm0[3,4],ymm11[5],ymm0[6,7]
2685 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
2686 ; AVX2-SLOW-NEXT: # ymm9 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
2687 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,2]
2688 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm0, %ymm9, %ymm0
2689 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
2690 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm2[0],ymm5[0],ymm2[1],ymm5[1],ymm2[2],ymm5[2],ymm2[3],ymm5[3],ymm2[8],ymm5[8],ymm2[9],ymm5[9],ymm2[10],ymm5[10],ymm2[11],ymm5[11]
2691 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
2692 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[1,0,2,2,5,4,6,6]
2693 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,2,3]
2694 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm7[1],ymm4[2,3],ymm7[4],ymm4[5,6],ymm7[7]
2695 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm8[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
2696 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,2]
2697 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
2698 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm10[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
2699 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
2700 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm4, %ymm3, %ymm2
2701 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2702 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 96(%rax)
2703 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 160(%rax)
2704 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 288(%rax)
2705 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2706 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
2707 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2708 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
2709 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2710 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
2711 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 192(%rax)
2712 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2713 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
2714 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2715 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
2716 ; AVX2-SLOW-NEXT: vmovdqa %ymm12, (%rax)
2717 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2718 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
2719 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2720 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
2721 ; AVX2-SLOW-NEXT: addq $616, %rsp # imm = 0x268
2722 ; AVX2-SLOW-NEXT: vzeroupper
2723 ; AVX2-SLOW-NEXT: retq
2725 ; AVX2-FAST-LABEL: store_i16_stride6_vf32:
2726 ; AVX2-FAST: # %bb.0:
2727 ; AVX2-FAST-NEXT: subq $648, %rsp # imm = 0x288
2728 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
2729 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2730 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm4
2731 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
2732 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm1, %xmm1
2733 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm2
2734 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2735 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm5
2736 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm2
2737 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2738 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2739 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm2
2740 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2741 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm6
2742 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2743 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm3
2744 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2745 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm7
2746 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2747 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2748 ; AVX2-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
2749 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
2750 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
2751 ; AVX2-FAST-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
2752 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
2753 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
2754 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
2755 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm3
2756 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2757 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
2758 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
2759 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2760 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
2761 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm1, %ymm3, %ymm1
2762 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2763 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm1
2764 ; AVX2-FAST-NEXT: vmovdqa %xmm4, %xmm14
2765 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2766 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm5, %xmm0
2767 ; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm12
2768 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2769 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
2770 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm1 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2771 ; AVX2-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2772 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm3 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
2773 ; AVX2-FAST-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2774 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2775 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm3
2776 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
2777 ; AVX2-FAST-NEXT: vpbroadcastq %xmm1, %ymm1
2778 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2779 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[2,1,3,3,4,5,6,7]
2780 ; AVX2-FAST-NEXT: vmovdqa %xmm3, %xmm8
2781 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2782 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2783 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7]
2784 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm3
2785 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm1
2786 ; AVX2-FAST-NEXT: vmovdqa %xmm3, %xmm9
2787 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2788 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2789 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm0, %ymm1, %ymm0
2790 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2791 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm2
2792 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2793 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm1
2794 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2795 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
2796 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm1
2797 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm2
2798 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
2799 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm10
2800 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm4
2801 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm2 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2802 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2803 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm10[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2804 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2805 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
2806 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
2807 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
2808 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
2809 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm2
2810 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2811 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
2812 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2813 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
2814 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm3
2815 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2816 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
2817 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
2818 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm3
2819 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
2820 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm1, %ymm3, %ymm1
2821 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2822 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm3
2823 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2824 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm1
2825 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2826 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm1
2827 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm0
2828 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
2829 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm5
2830 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm1
2831 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2832 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2833 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm5[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
2834 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
2835 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
2836 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
2837 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2838 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm1
2839 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2840 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
2841 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
2842 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7]
2843 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm1
2844 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2845 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm2
2846 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2847 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm0, %ymm2, %ymm0
2848 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2849 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2850 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <1,2,1,2,u,u,3,3>
2851 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2852 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm12[4],xmm14[4],xmm12[5],xmm14[5],xmm12[6],xmm14[6],xmm12[7],xmm14[7]
2853 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[1,1,1,1]
2854 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm6[2],ymm0[3,4],ymm6[5],ymm0[6,7]
2855 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
2856 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm8, %xmm13
2857 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
2858 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm0[0],ymm13[1],ymm0[2,3],ymm13[4],ymm0[5,6],ymm13[7]
2859 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
2860 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm9, %xmm1
2861 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm1[0,1,0,1]
2862 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
2863 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm13, %ymm0, %ymm0
2864 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm8
2865 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2866 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
2867 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2868 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm15[4],xmm14[5],xmm15[5],xmm14[6],xmm15[6],xmm14[7],xmm15[7]
2869 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2870 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2871 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2872 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
2873 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,1,1,1]
2874 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
2875 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm12 # 16-byte Reload
2876 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm12, %xmm2
2877 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
2878 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
2879 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2880 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm11, %xmm2
2881 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
2882 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm0, %ymm2, %ymm0
2883 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2884 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm10[4],ymm4[4],ymm10[5],ymm4[5],ymm10[6],ymm4[6],ymm10[7],ymm4[7],ymm10[12],ymm4[12],ymm10[13],ymm4[13],ymm10[14],ymm4[14],ymm10[15],ymm4[15]
2885 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [5,6,5,6,5,6,7,7]
2886 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2887 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2888 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
2889 ; AVX2-FAST-NEXT: # ymm3 = ymm3[4],mem[4],ymm3[5],mem[5],ymm3[6],mem[6],ymm3[7],mem[7],ymm3[12],mem[12],ymm3[13],mem[13],ymm3[14],mem[14],ymm3[15],mem[15]
2890 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[3,3,3,3]
2891 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7]
2892 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
2893 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2894 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm4, %ymm6
2895 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,2,3]
2896 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3],ymm6[4],ymm0[5,6],ymm6[7]
2897 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
2898 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2899 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm13
2900 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,1,2,3]
2901 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm0, %ymm13, %ymm0
2902 ; AVX2-FAST-NEXT: vmovdqa %ymm8, %ymm13
2903 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2904 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
2905 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[12],ymm6[12],ymm5[13],ymm6[13],ymm5[14],ymm6[14],ymm5[15],ymm6[15]
2906 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm0
2907 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
2908 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2909 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm8[4],ymm4[5],ymm8[5],ymm4[6],ymm8[6],ymm4[7],ymm8[7],ymm4[12],ymm8[12],ymm4[13],ymm8[13],ymm4[14],ymm8[14],ymm4[15],ymm8[15]
2910 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[3,3,3,3]
2911 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
2912 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
2913 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm9, %ymm2
2914 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
2915 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
2916 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
2917 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm10, %ymm2
2918 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
2919 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm0, %ymm2, %ymm13
2920 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
2921 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
2922 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [1,0,2,2,1,0,2,2]
2923 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
2924 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
2925 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2926 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6],ymm0[7]
2927 ; AVX2-FAST-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm12[0],zero,xmm12[1],zero,xmm12[2],zero,xmm12[3],zero
2928 ; AVX2-FAST-NEXT: vpbroadcastq %xmm1, %ymm1
2929 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
2930 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm11[0,0,2,1,4,5,6,7]
2931 ; AVX2-FAST-NEXT: vpbroadcastq %xmm0, %ymm2
2932 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
2933 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm2
2934 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2935 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
2936 ; AVX2-FAST-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
2937 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
2938 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2939 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
2940 ; AVX2-FAST-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
2941 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
2942 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5,6],ymm1[7]
2943 ; AVX2-FAST-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2944 ; AVX2-FAST-NEXT: # xmm3 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
2945 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
2946 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7]
2947 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2948 ; AVX2-FAST-NEXT: # xmm3 = mem[0,0,2,1,4,5,6,7]
2949 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
2950 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm3, %ymm1
2951 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2952 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
2953 ; AVX2-FAST-NEXT: # ymm3 = ymm3[0],mem[0],ymm3[1],mem[1],ymm3[2],mem[2],ymm3[3],mem[3],ymm3[8],mem[8],ymm3[9],mem[9],ymm3[10],mem[10],ymm3[11],mem[11]
2954 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
2955 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm14 # 32-byte Folded Reload
2956 ; AVX2-FAST-NEXT: # ymm14 = ymm7[0],mem[0],ymm7[1],mem[1],ymm7[2],mem[2],ymm7[3],mem[3],ymm7[8],mem[8],ymm7[9],mem[9],ymm7[10],mem[10],ymm7[11],mem[11]
2957 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [5,4,2,2,5,4,6,6]
2958 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm15, %ymm3
2959 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2960 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm14[0],ymm3[1],ymm14[2,3],ymm3[4],ymm14[5,6],ymm3[7]
2961 ; AVX2-FAST-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
2962 ; AVX2-FAST-NEXT: # ymm12 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
2963 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,2]
2964 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm12[2],ymm3[3,4],ymm12[5],ymm3[6,7]
2965 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
2966 ; AVX2-FAST-NEXT: # ymm11 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
2967 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,2]
2968 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm3, %ymm11, %ymm3
2969 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[8],ymm6[8],ymm5[9],ymm6[9],ymm5[10],ymm6[10],ymm5[11],ymm6[11]
2970 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm15, %ymm7
2971 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm4[0],ymm8[0],ymm4[1],ymm8[1],ymm4[2],ymm8[2],ymm4[3],ymm8[3],ymm4[8],ymm8[8],ymm4[9],ymm8[9],ymm4[10],ymm8[10],ymm4[11],ymm8[11]
2972 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
2973 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3],ymm7[4],ymm8[5,6],ymm7[7]
2974 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm5 = ymm9[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
2975 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,2]
2976 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1],ymm5[2],ymm7[3,4],ymm5[5],ymm7[6,7]
2977 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm10[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
2978 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
2979 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm5, %ymm4, %ymm0
2980 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2981 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 96(%rax)
2982 ; AVX2-FAST-NEXT: vmovdqa %ymm13, 160(%rax)
2983 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 288(%rax)
2984 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2985 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
2986 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2987 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
2988 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2989 ; AVX2-FAST-NEXT: vmovaps %ymm0, 128(%rax)
2990 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 192(%rax)
2991 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2992 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
2993 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2994 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
2995 ; AVX2-FAST-NEXT: vmovdqa %ymm2, (%rax)
2996 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2997 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
2998 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2999 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
3000 ; AVX2-FAST-NEXT: addq $648, %rsp # imm = 0x288
3001 ; AVX2-FAST-NEXT: vzeroupper
3002 ; AVX2-FAST-NEXT: retq
3004 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf32:
3005 ; AVX2-FAST-PERLANE: # %bb.0:
3006 ; AVX2-FAST-PERLANE-NEXT: subq $648, %rsp # imm = 0x288
3007 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm1
3008 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3009 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm4
3010 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
3011 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3012 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm2
3013 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3014 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm5
3015 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm2, %xmm2
3016 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3017 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3018 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm2
3019 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3020 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm6
3021 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3022 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm3
3023 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, (%rsp) # 16-byte Spill
3024 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm7
3025 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3026 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3027 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm2, %ymm2
3028 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
3029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
3030 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3031 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
3032 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3033 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
3034 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
3035 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3036 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
3037 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
3038 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3039 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
3040 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm2, %ymm3, %ymm2
3041 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3042 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm4, %xmm2
3043 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, %xmm8
3044 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3045 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm0
3046 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3047 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
3048 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm2 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3049 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3050 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3051 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3052 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3053 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm3
3054 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
3055 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm2, %ymm2
3056 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
3057 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[2,1,3,3,4,5,6,7]
3058 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, %xmm9
3059 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3060 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3061 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
3062 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm2
3063 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm2, %xmm1
3064 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, %xmm14
3065 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3066 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3067 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
3068 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm11
3070 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm13
3071 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
3072 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm13, %ymm1
3073 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm11, %ymm2
3074 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3075 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
3076 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm12
3077 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm2
3078 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3079 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3080 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm3 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3081 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
3082 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
3083 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
3084 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
3085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm2
3086 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3087 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3088 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3089 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
3090 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm15
3091 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
3092 ; AVX2-FAST-PERLANE-NEXT: # ymm4 = mem[0,1,0,1]
3093 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm15, %ymm2
3094 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3095 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3096 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm1, %ymm2, %ymm1
3097 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3098 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm2
3099 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3100 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm1
3101 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3102 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm1, %ymm1
3103 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm2, %ymm0
3104 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
3105 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm3
3106 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3107 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm1
3108 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3109 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3110 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm2 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3111 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
3112 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
3113 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
3114 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
3115 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm1
3116 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3117 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3118 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3119 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7]
3120 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm1
3121 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3122 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm1, %ymm1
3123 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3124 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
3125 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3126 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm5[4],xmm8[4],xmm5[5],xmm8[5],xmm5[6],xmm8[6],xmm5[7],xmm8[7]
3127 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
3128 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
3129 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,2,3,3]
3130 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3131 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
3132 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
3133 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm4
3134 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
3135 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3],ymm4[4],ymm0[5,6],ymm4[7]
3136 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
3137 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm7, %xmm14, %xmm4
3138 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm4[0,1,0,1]
3139 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
3140 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm14, %ymm0
3141 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3142 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3143 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3144 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
3145 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
3146 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3147 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
3148 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3149 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[1,2,3,3]
3150 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
3151 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1],ymm0[2],ymm14[3,4],ymm0[5],ymm14[6,7]
3152 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3153 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm1
3154 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3155 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
3156 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3157 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm7, %xmm8, %xmm1
3158 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3159 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm1, %ymm0
3160 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3161 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm11[4],ymm13[4],ymm11[5],ymm13[5],ymm11[6],ymm13[6],ymm11[7],ymm13[7],ymm11[12],ymm13[12],ymm11[13],ymm13[13],ymm11[14],ymm13[14],ymm11[15],ymm13[15]
3162 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
3163 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3164 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm10[4],ymm12[5],ymm10[5],ymm12[6],ymm10[6],ymm12[7],ymm10[7],ymm12[12],ymm10[12],ymm12[13],ymm10[13],ymm12[14],ymm10[14],ymm12[15],ymm10[15]
3165 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
3166 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3167 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
3168 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
3169 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
3170 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm11, %ymm7
3171 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,2,3]
3172 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2,3],ymm7[4],ymm0[5,6],ymm7[7]
3173 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm15[u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31]
3174 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
3175 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm14, %ymm0
3176 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3177 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3178 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
3179 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm7[4],ymm9[4],ymm7[5],ymm9[5],ymm7[6],ymm9[6],ymm7[7],ymm9[7],ymm7[12],ymm9[12],ymm7[13],ymm9[13],ymm7[14],ymm9[14],ymm7[15],ymm9[15]
3180 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
3181 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
3182 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
3183 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm15[4],ymm14[4],ymm15[5],ymm14[5],ymm15[6],ymm14[6],ymm15[7],ymm14[7],ymm15[12],ymm14[12],ymm15[13],ymm14[13],ymm15[14],ymm14[14],ymm15[15],ymm14[15]
3184 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[1,2,3,3,5,6,7,7]
3185 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
3186 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0,1],ymm0[2],ymm15[3,4],ymm0[5],ymm15[6,7]
3187 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
3188 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm15, %ymm1
3189 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
3190 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
3191 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3192 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31]
3193 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
3194 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
3195 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm0, %ymm1, %ymm0
3196 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3197 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
3198 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3199 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
3200 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
3201 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3202 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
3203 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
3204 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
3205 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
3206 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm0 = xmm8[0,0,2,1,4,5,6,7]
3207 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm0, %ymm4
3208 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
3209 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm4, %ymm4
3210 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3211 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
3212 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
3213 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3214 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm15 # 16-byte Folded Reload
3215 ; AVX2-FAST-PERLANE-NEXT: # xmm15 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
3216 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
3217 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,0,2,2]
3218 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
3219 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm15[1],ymm1[2,3],ymm15[4],ymm1[5,6],ymm15[7]
3220 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
3221 ; AVX2-FAST-PERLANE-NEXT: # xmm15 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
3222 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm15, %ymm15
3223 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm15[2],ymm1[3,4],ymm15[5],ymm1[6,7]
3224 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
3225 ; AVX2-FAST-PERLANE-NEXT: # xmm15 = mem[0,0,2,1,4,5,6,7]
3226 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm15, %ymm15
3227 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm15, %ymm1
3228 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3229 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm15 = ymm2[0],ymm13[0],ymm2[1],ymm13[1],ymm2[2],ymm13[2],ymm2[3],ymm13[3],ymm2[8],ymm13[8],ymm2[9],ymm13[9],ymm2[10],ymm13[10],ymm2[11],ymm13[11]
3230 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm12 = ymm12[0],ymm10[0],ymm12[1],ymm10[1],ymm12[2],ymm10[2],ymm12[3],ymm10[3],ymm12[8],ymm10[8],ymm12[9],ymm10[9],ymm12[10],ymm10[10],ymm12[11],ymm10[11]
3231 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm15[2,2,2,3]
3232 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[1,0,2,2,5,4,6,6]
3233 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,2,3]
3234 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0],ymm12[1],ymm13[2,3],ymm12[4],ymm13[5,6],ymm12[7]
3235 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm11 = ymm11[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3236 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,2]
3237 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1],ymm11[2],ymm12[3,4],ymm11[5],ymm12[6,7]
3238 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
3239 ; AVX2-FAST-PERLANE-NEXT: # ymm10 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3240 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,2]
3241 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm11, %ymm10, %ymm10
3242 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[8],ymm9[8],ymm7[9],ymm9[9],ymm7[10],ymm9[10],ymm7[11],ymm9[11]
3243 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3244 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm2[0],ymm14[0],ymm2[1],ymm14[1],ymm2[2],ymm14[2],ymm2[3],ymm14[3],ymm2[8],ymm14[8],ymm2[9],ymm14[9],ymm2[10],ymm14[10],ymm2[11],ymm14[11]
3245 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm8[2,2,2,3]
3246 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[1,0,2,2,5,4,6,6]
3247 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,2,3]
3248 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3],ymm5[4],ymm6[5,6],ymm5[7]
3249 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
3250 ; AVX2-FAST-PERLANE-NEXT: # ymm3 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3251 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
3252 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7]
3253 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
3254 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3255 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
3256 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm3, %ymm2, %ymm0
3257 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3258 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 96(%rax)
3259 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3260 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 160(%rax)
3261 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3262 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 128(%rax)
3263 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, 288(%rax)
3264 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3265 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
3266 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3267 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
3268 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3269 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
3270 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3271 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
3272 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 192(%rax)
3273 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3274 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
3275 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, (%rax)
3276 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3277 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
3278 ; AVX2-FAST-PERLANE-NEXT: addq $648, %rsp # imm = 0x288
3279 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
3280 ; AVX2-FAST-PERLANE-NEXT: retq
3281 ;
3282 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride6_vf32:
3283 ; AVX512F-ONLY-SLOW: # %bb.0:
3284 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %ymm9
3285 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm9[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3286 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %ymm12
3287 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3288 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3289 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
3290 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm9[4],ymm12[5],ymm9[5],ymm12[6],ymm9[6],ymm12[7],ymm9[7],ymm12[12],ymm9[12],ymm12[13],ymm9[13],ymm12[14],ymm9[14],ymm12[15],ymm9[15]
3291 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
3292 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3293 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
3294 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %ymm8
3295 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[2,1,2,3,6,5,6,7]
3296 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3297 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %ymm10
3298 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[2,1,2,3,6,5,6,7]
3299 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3300 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
3301 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
3302 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm10[4],ymm8[4],ymm10[5],ymm8[5],ymm10[6],ymm8[6],ymm10[7],ymm8[7],ymm10[12],ymm8[12],ymm10[13],ymm8[13],ymm10[14],ymm8[14],ymm10[15],ymm8[15]
3303 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[3,3,3,3]
3304 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm2
3305 ; AVX512F-ONLY-SLOW-NEXT: movw $18724, %ax # imm = 0x4924
3306 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
3307 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
3308 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm2, %ymm0
3309 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %ymm1
3310 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
3311 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm4, %ymm1, %ymm3
3312 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
3313 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6],ymm3[7]
3314 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
3315 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3316 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3317 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
3318 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm2[0,1,2,3],zmm0[4,5,6,7]
3319 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %ymm2
3320 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
3321 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3322 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm18 = ymm0[2,2,2,3]
3323 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[2,3,2,3,6,7,6,7]
3324 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm25
3325 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
3326 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm0[2,1,2,3]
3327 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm2
3328 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3329 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm3
3330 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm5 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3331 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[2],ymm0[2],ymm5[3],ymm0[3],ymm5[8],ymm0[8],ymm5[9],ymm0[9],ymm5[10],ymm0[10],ymm5[11],ymm0[11]
3332 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
3333 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
3334 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[1,2,3,3,5,6,7,7]
3335 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3336 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm6
3337 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm0
3338 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[2,1,2,3,6,5,6,7]
3339 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm5[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3340 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm5
3341 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm5[2,1,2,3,6,5,6,7]
3342 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm11[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3343 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm11[0],ymm7[0],ymm11[1],ymm7[1],ymm11[2],ymm7[2],ymm11[3],ymm7[3],ymm11[8],ymm7[8],ymm11[9],ymm7[9],ymm11[10],ymm7[10],ymm11[11],ymm7[11]
3344 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,2,3]
3345 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm11 = ymm5[4],ymm0[4],ymm5[5],ymm0[5],ymm5[6],ymm0[6],ymm5[7],ymm0[7],ymm5[12],ymm0[12],ymm5[13],ymm0[13],ymm5[14],ymm0[14],ymm5[15],ymm0[15]
3346 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[3,3,3,3]
3347 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm7, %zmm7
3348 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm6, %zmm7 {%k1}
3349 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm7, %ymm11
3350 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm6
3351 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm4, %ymm6, %ymm4
3352 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,2,3]
3353 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm11[0],ymm4[1],ymm11[2,3],ymm4[4],ymm11[5,6],ymm4[7]
3354 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm4
3355 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3356 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
3357 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm11[0],ymm7[1,2],ymm11[3],ymm7[4,5],ymm11[6],ymm7[7]
3358 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm7[0,1,2,3],zmm4[4,5,6,7]
3359 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm4
3360 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm4[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
3361 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3362 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm21 = ymm7[2,2,2,3]
3363 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm4[2,3,2,3,6,7,6,7]
3364 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm7[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
3365 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm7[2,1,2,3]
3366 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm13
3367 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm14
3368 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
3369 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
3370 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm24 = <17,18,17,18,u,u,19,19,5,4,2,2,5,4,6,6>
3371 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm7, %zmm24, %zmm2
3372 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm11
3373 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm15
3374 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
3375 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm3[1,1,1,1]
3376 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm3
3377 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[2],ymm0[2],ymm5[3],ymm0[3],ymm5[8],ymm0[8],ymm5[9],ymm0[9],ymm5[10],ymm0[10],ymm5[11],ymm0[11]
3378 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3379 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm7, %zmm0
3380 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
3381 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm2, %ymm0
3382 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm6[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3383 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,2]
3384 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7]
3385 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm5
3386 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm0
3387 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm6 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
3388 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm6, %xmm0, %xmm7
3389 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3390 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm7[1],ymm2[2,3],ymm7[4],ymm2[5,6],ymm7[7]
3391 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm2[0,1,2,3],zmm5[4,5,6,7]
3392 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm2
3393 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[2,3,2,3]
3394 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,2,1,4,5,6,7]
3395 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm5[0,1,0,1]
3396 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm5
3397 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm12[0],ymm9[0],ymm12[1],ymm9[1],ymm12[2],ymm9[2],ymm12[3],ymm9[3],ymm12[8],ymm9[8],ymm12[9],ymm9[9],ymm12[10],ymm9[10],ymm12[11],ymm9[11]
3398 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
3399 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm7, %zmm24, %zmm9
3400 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm12
3401 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3402 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm4[2,2,2,2]
3403 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm4
3404 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm10[0],ymm8[0],ymm10[1],ymm8[1],ymm10[2],ymm8[2],ymm10[3],ymm8[3],ymm10[8],ymm8[8],ymm10[9],ymm8[9],ymm10[10],ymm8[10],ymm10[11],ymm8[11]
3405 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm4[4],xmm12[4],xmm4[5],xmm12[5],xmm4[6],xmm12[6],xmm4[7],xmm12[7]
3406 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[1,1,1,1]
3407 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
3408 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
3409 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm7, %zmm9 {%k1}
3410 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm9, %ymm7
3411 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3412 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
3413 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm7[0,1],ymm1[2],ymm7[3,4],ymm1[5],ymm7[6,7]
3414 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
3415 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %xmm7
3416 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm6, %xmm7, %xmm6
3417 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3418 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1],ymm9[2,3],ymm6[4],ymm9[5,6],ymm6[7]
3419 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %xmm10
3420 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm6[0,1,2,3],zmm1[4,5,6,7]
3421 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3422 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3423 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3424 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3425 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [1,0,2,2,1,0,2,2,16,17,16,17,16,17,16,17]
3426 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm3, %zmm5, %zmm6
3427 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[0,1,2,1]
3428 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
3429 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm4[0,1,2,1]
3430 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,7,6,5]
3431 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
3432 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm10[2,3,2,3]
3433 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,1,4,5,6,7]
3434 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm3[0,1,0,1]
3435 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm25, %ymm3
3436 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3437 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
3438 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm12[0],xmm4[1],xmm12[1],xmm4[2],xmm12[2],xmm4[3],xmm12[3]
3439 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
3440 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm4[0,1,2,3],zmm9[0,1,0,1]
3441 ; AVX512F-ONLY-SLOW-NEXT: movw $9362, %ax # imm = 0x2492
3442 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
3443 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm6, %zmm4 {%k1}
3444 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm4, %ymm6
3445 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm7[2,1,3,3,4,5,6,7]
3446 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
3447 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm9[0],ymm6[1,2],ymm9[3],ymm6[4,5],ymm9[6],ymm6[7]
3448 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm6
3449 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm7 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero
3450 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm7, %ymm7
3451 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm7[2],ymm4[3,4],ymm7[5],ymm4[6,7]
3452 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm4[0,1,2,3],zmm6[4,5,6,7]
3453 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm10[0,0,2,1,4,5,6,7]
3454 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm6, %ymm6
3455 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm10[0,2,2,3,4,5,6,7]
3456 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
3457 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
3458 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
3459 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm10 = xmm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3460 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm12 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3461 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3]
3462 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm10, %zmm5, %zmm9
3463 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm11[0,1,2,1]
3464 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,5]
3465 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm15[0,1,2,1]
3466 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,7,6,5]
3467 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
3468 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
3469 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
3470 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm10[0,1,2,3],zmm5[0,1,0,1]
3471 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm9, %zmm5 {%k1}
3472 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm5, %ymm9
3473 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm0[2,1,3,3,4,5,6,7]
3474 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
3475 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1,2],ymm10[3],ymm9[4,5],ymm10[6],ymm9[7]
3476 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm9
3477 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
3478 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm0, %ymm0
3479 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1],ymm0[2],ymm5[3,4],ymm0[5],ymm5[6,7]
3480 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm9[4,5,6,7]
3481 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm2[0,0,2,1,4,5,6,7]
3482 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm5, %ymm5
3483 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
3484 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
3485 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3486 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3487 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm18, %zmm9
3488 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
3489 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm17, %zmm10, %zmm9
3490 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm22, %zmm21, %zmm11
3491 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm20, %zmm10, %zmm11
3492 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm16, %zmm10
3493 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
3494 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm23, %zmm12, %zmm10
3495 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm8, %zmm3
3496 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm12, %zmm3
3497 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm1
3498 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
3499 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm4, %zmm6, %zmm1
3500 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm5, %zmm2
3501 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm6, %zmm2
3502 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, (%rax)
3503 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
3504 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 256(%rax)
3505 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 64(%rax)
3506 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 128(%rax)
3507 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 320(%rax)
3508 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
3509 ; AVX512F-ONLY-SLOW-NEXT: retq
3510 ;
3511 ; AVX512F-ONLY-FAST-LABEL: store_i16_stride6_vf32:
3512 ; AVX512F-ONLY-FAST: # %bb.0:
3513 ; AVX512F-ONLY-FAST-NEXT: pushq %rax
3514 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %ymm2
3515 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
3516 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm1
3517 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %ymm4
3518 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm3
3519 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11]
3520 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm4[4],ymm2[4],ymm4[5],ymm2[5],ymm4[6],ymm2[6],ymm4[7],ymm2[7],ymm4[12],ymm2[12],ymm4[13],ymm2[13],ymm4[14],ymm2[14],ymm4[15],ymm2[15]
3521 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm24
3522 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm25
3523 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm6 = [2,1,2,3,11,11,11,11]
3524 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm6, %zmm1
3525 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %ymm12
3526 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %ymm13
3527 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm13[4],ymm12[4],ymm13[5],ymm12[5],ymm13[6],ymm12[6],ymm13[7],ymm12[7],ymm13[12],ymm12[12],ymm13[13],ymm12[13],ymm13[14],ymm12[14],ymm13[15],ymm12[15]
3528 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [5,6,5,6,5,6,7,7]
3529 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm3, %ymm10, %ymm3
3530 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3531 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm5 = ymm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm13[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3532 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
3533 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
3534 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
3535 ; AVX512F-ONLY-FAST-NEXT: movw $18724, %ax # imm = 0x4924
3536 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
3537 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm1 {%k1}
3538 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [8,21,10,11,20,13,14,23]
3539 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %ymm2
3540 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
3541 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm3
3542 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm4
3543 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm3, %zmm11, %zmm4
3544 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3545 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm27
3546 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm17 = [12,1,2,13,4,5,14,7]
3547 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm3, %ymm17, %ymm1
3548 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm1[0,1,2,3],zmm4[0,1,2,3]
3549 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %ymm3
3550 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
3551 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm3, %ymm1
3552 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
3553 ; AVX512F-ONLY-FAST-NEXT: # ymm15 = mem[0,1,0,1]
3554 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm3, %ymm2
3555 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
3556 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = <2,2,u,3,10,u,10,11>
3557 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm2
3558 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3559 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm9
3560 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm9, %ymm1
3561 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm2
3562 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm0
3563 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
3564 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm2[4],ymm9[4],ymm2[5],ymm9[5],ymm2[6],ymm9[6],ymm2[7],ymm9[7],ymm2[12],ymm9[12],ymm2[13],ymm9[13],ymm2[14],ymm9[14],ymm2[15],ymm9[15]
3565 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm28
3566 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm6, %zmm0
3567 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm1
3568 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm6
3569 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm6[4],ymm1[4],ymm6[5],ymm1[5],ymm6[6],ymm1[6],ymm6[7],ymm1[7],ymm6[12],ymm1[12],ymm6[13],ymm1[13],ymm6[14],ymm1[14],ymm6[15],ymm1[15]
3570 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm2, %ymm10, %ymm2
3571 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm10 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3572 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm6[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3573 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0],ymm10[0],ymm4[1],ymm10[1],ymm4[2],ymm10[2],ymm4[3],ymm10[3],ymm4[8],ymm10[8],ymm4[9],ymm10[9],ymm4[10],ymm10[10],ymm4[11],ymm10[11]
3574 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
3575 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
3576 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm2, %zmm0 {%k1}
3577 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm10
3578 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm7, %ymm10, %ymm2
3579 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm2, %zmm0, %zmm11
3580 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm10[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3581 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm2, %ymm17, %ymm0
3582 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm17 = zmm0[0,1,2,3],zmm11[0,1,2,3]
3583 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm11
3584 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm11, %ymm0
3585 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm11, %ymm2
3586 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm2
3587 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3588 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm14
3589 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm15
3590 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm6[0],ymm1[0],ymm6[1],ymm1[1],ymm6[2],ymm1[2],ymm6[3],ymm1[3],ymm6[8],ymm1[8],ymm6[9],ymm1[9],ymm6[10],ymm1[10],ymm6[11],ymm1[11]
3591 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
3592 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm18
3593 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %xmm1
3594 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %xmm6
3595 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm13[0],ymm12[0],ymm13[1],ymm12[1],ymm13[2],ymm12[2],ymm13[3],ymm12[3],ymm13[8],ymm12[8],ymm13[9],ymm12[9],ymm13[10],ymm12[10],ymm13[11],ymm12[11]
3596 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm1[4],xmm6[5],xmm1[5],xmm6[6],xmm1[6],xmm6[7],xmm1[7]
3597 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm20
3598 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %xmm13
3599 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
3600 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3601 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %xmm3
3602 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm12
3603 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm12[4],xmm4[4],xmm12[5],xmm4[5],xmm12[6],xmm4[6],xmm12[7],xmm4[7]
3604 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,0,2,1,8,9,8,9]
3605 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
3606 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm12
3607 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [1,0,2,2,1,0,2,2]
3608 ; AVX512F-ONLY-FAST-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
3609 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
3610 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm5, %ymm23, %ymm5
3611 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3612 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm6 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3613 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
3614 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm1, %ymm1
3615 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm5, %zmm1
3616 ; AVX512F-ONLY-FAST-NEXT: movw $9362, %ax # imm = 0x2492
3617 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k2
3618 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm1, %zmm12 {%k2}
3619 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [16,9,10,17,12,13,18,15]
3620 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, %zmm1
3621 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %xmm6
3622 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm8 = xmm6[2,1,3,3,4,5,6,7]
3623 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm8, %zmm5, %zmm1
3624 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm8 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
3625 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm22 = [0,1,8,3,4,9,6,7]
3626 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm8, %ymm22, %ymm12
3627 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm12[0,1,2,3],zmm1[0,1,2,3]
3628 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm8
3629 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm0
3630 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm8, %xmm1
3631 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm2
3632 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3633 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
3634 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm21, %zmm2
3635 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = <u,0,0,u,8,8,u,9>
3636 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %xmm12
3637 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm7 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
3638 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm7, %xmm12, %xmm4
3639 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm12[0,0,2,1,4,5,6,7]
3640 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm1
3641 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
3642 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm4, %ymm23, %ymm4
3643 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm14 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3644 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm15 = xmm15[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3645 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
3646 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm14, %ymm14
3647 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm14, %zmm4, %zmm4
3648 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm4, %zmm2 {%k2}
3649 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm4
3650 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm14 = xmm4[2,1,3,3,4,5,6,7]
3651 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm14, %zmm2, %zmm5
3652 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm14 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
3653 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm14, %ymm22, %ymm2
3654 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm2[0,1,2,3],zmm5[0,1,2,3]
3655 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm5
3656 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm7, %xmm5, %xmm7
3657 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm14 = xmm5[0,0,2,1,4,5,6,7]
3658 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm21, %zmm14
3659 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm28, %ymm2
3660 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm2[0],ymm9[0],ymm2[1],ymm9[1],ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[8],ymm9[8],ymm2[9],ymm9[9],ymm2[10],ymm9[10],ymm2[11],ymm9[11]
3661 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
3662 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = [1,1,1,1,10,10,10,11]
3663 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm15, %zmm0
3664 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <1,2,1,2,u,u,3,3,13,12,10,10,13,12,14,14>
3665 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm18, %zmm7, %zmm18
3666 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm0, %zmm18 {%k1}
3667 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [8,9,20,11,12,21,14,15]
3668 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm8 = ymm10[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3669 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, %zmm10
3670 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm8, %zmm0, %zmm10
3671 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
3672 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3673 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm21 = [0,9,2,3,8,5,6,11]
3674 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm21, %ymm18
3675 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm11[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3676 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm11 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
3677 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm8
3678 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <0,u,0,1,u,10,10,u>
3679 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm5, %zmm8
3680 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm24, %ymm4
3681 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm25, %ymm9
3682 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0],ymm9[0],ymm4[1],ymm9[1],ymm4[2],ymm9[2],ymm4[3],ymm9[3],ymm4[8],ymm9[8],ymm4[9],ymm9[9],ymm4[10],ymm9[10],ymm4[11],ymm9[11]
3683 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm13[4],xmm3[5],xmm13[5],xmm3[6],xmm13[6],xmm3[7],xmm13[7]
3684 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm15, %zmm3
3685 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm20, %zmm7, %zmm4
3686 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm4 {%k1}
3687 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm27, %ymm3
3688 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3689 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm3, %zmm4, %zmm0
3690 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm2
3691 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm2, %ymm21, %ymm4
3692 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm11, %xmm12, %xmm2
3693 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm26, %ymm3
3694 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3695 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm5, %zmm2
3696 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm4[0,1,2,3],zmm0[0,1,2,3]
3697 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
3698 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm2
3699 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3700 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 256(%rax)
3701 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm18[0,1,2,3],zmm10[0,1,2,3]
3702 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm8
3703 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 64(%rax)
3704 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
3705 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm22, %zmm0, %zmm14
3706 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, (%rax)
3707 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm19, %zmm0, %zmm1
3708 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
3709 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
3710 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
3711 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm17, %zmm0, %zmm1
3712 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
3713 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
3714 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm16, %zmm0, %zmm1
3715 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 320(%rax)
3716 ; AVX512F-ONLY-FAST-NEXT: popq %rax
3717 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
3718 ; AVX512F-ONLY-FAST-NEXT: retq
3719 ;
3720 ; AVX512DQ-SLOW-LABEL: store_i16_stride6_vf32:
3721 ; AVX512DQ-SLOW: # %bb.0:
3722 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm6
3723 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm1
3724 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3725 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm11
3726 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm3
3727 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
3728 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3729 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3730 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm3, %xmm27
3731 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3732 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [1,0,2,2,1,0,2,2,16,17,16,17,16,17,16,17]
3733 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm2, %zmm0
3734 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm12
3735 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm4
3736 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,1,2,1]
3737 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
3738 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm15
3739 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm5
3740 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,1,2,1]
3741 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
3742 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
3743 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3744 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm5, %xmm28
3745 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm29
3746 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3747 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm3[0,1,2,3],zmm1[0,1,0,1]
3748 ; AVX512DQ-SLOW-NEXT: movw $9362, %ax # imm = 0x2492
3749 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
3750 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
3751 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
3752 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm5
3753 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %xmm4
3754 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[2,1,3,3,4,5,6,7]
3755 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
3756 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm0[1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7]
3757 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
3758 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
3759 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm31
3760 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
3761 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7]
3762 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
3763 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
3764 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm4
3765 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %xmm1
3766 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,2,1,4,5,6,7]
3767 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm0, %ymm26
3768 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,2,2,3,4,5,6,7]
3769 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm1, %xmm30
3770 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
3771 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm0[0,0,2,1]
3772 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3]
3773 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3774 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm6, %xmm25
3775 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm3 = xmm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3776 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
3777 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm2, %zmm0
3778 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm12[0,1,2,1]
3779 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
3780 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[0,1,2,1]
3781 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
3782 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3783 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3]
3784 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3785 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm1[0,1,0,1]
3786 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
3787 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
3788 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm5[2,1,3,3,4,5,6,7]
3789 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
3790 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
3791 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
3792 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
3793 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm5, %xmm17
3794 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
3795 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
3796 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm21 = zmm1[0,1,2,3],zmm0[4,5,6,7]
3797 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm4[0,0,2,1,4,5,6,7]
3798 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm0, %ymm22
3799 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm4[0,2,2,3,4,5,6,7]
3800 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm16
3801 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
3802 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm18 = ymm0[0,0,2,1]
3803 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %ymm8
3804 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm8[2,1,2,3,6,5,6,7]
3805 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3806 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %ymm7
3807 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm7[2,1,2,3,6,5,6,7]
3808 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3809 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3810 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
3811 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
3812 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
3813 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %ymm13
3814 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm13[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3815 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %ymm14
3816 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm3 = ymm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm14[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3817 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
3818 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
3819 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm14[4],ymm13[4],ymm14[5],ymm13[5],ymm14[6],ymm13[6],ymm14[7],ymm13[7],ymm14[12],ymm13[12],ymm14[13],ymm13[13],ymm14[14],ymm13[14],ymm14[15],ymm13[15]
3820 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[1,2,3,3,5,6,7,7]
3821 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3822 ; AVX512DQ-SLOW-NEXT: movw $18724, %ax # imm = 0x4924
3823 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
3824 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
3825 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm0 {%k1}
3826 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %ymm6
3827 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
3828 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm10, %ymm6, %ymm1
3829 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
3830 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm2
3831 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
3832 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
3833 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3834 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3835 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
3836 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm0[0,1,2,3],zmm1[4,5,6,7]
3837 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm5
3838 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm5[2,1,2,3,6,5,6,7]
3839 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3840 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm4
3841 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[2,1,2,3,6,5,6,7]
3842 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3843 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
3844 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm0[2,1,2,3]
3845 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15]
3846 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm0[3,3,3,3]
3847 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm3
3848 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3849 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm1
3850 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm9 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
3851 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm9[0],ymm2[0],ymm9[1],ymm2[1],ymm9[2],ymm2[2],ymm9[3],ymm2[3],ymm9[8],ymm2[8],ymm9[9],ymm2[9],ymm9[10],ymm2[10],ymm9[11],ymm2[11]
3852 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
3853 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm9 = ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[12],ymm3[12],ymm1[13],ymm3[13],ymm1[14],ymm3[14],ymm1[15],ymm3[15]
3854 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[1,2,3,3,5,6,7,7]
3855 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
3856 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm24, %zmm0
3857 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm9, %zmm2, %zmm0 {%k1}
3858 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm2
3859 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm10, %ymm2, %ymm9
3860 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,2,3]
3861 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm10
3862 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3],ymm9[4],ymm10[5,6],ymm9[7]
3863 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm9
3864 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm10 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
3865 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,3]
3866 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0],ymm0[1,2],ymm10[3],ymm0[4,5],ymm10[6],ymm0[7]
3867 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm0[0,1,2,3],zmm9[4,5,6,7]
3868 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %ymm9
3869 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm9[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
3870 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3871 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm0[2,2,2,3]
3872 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm25, %xmm0
3873 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm0[4],xmm11[5],xmm0[5],xmm11[6],xmm0[6],xmm11[7],xmm0[7]
3874 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm9[2,3,2,3,6,7,6,7]
3875 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm10 = ymm10[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
3876 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm10[2,1,2,3]
3877 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
3878 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <17,18,17,18,u,u,19,19,5,4,2,2,5,4,6,6>
3879 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm0, %zmm10, %zmm3
3880 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm12[4],xmm15[5],xmm12[5],xmm15[6],xmm12[6],xmm15[7],xmm12[7]
3881 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm1
3882 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm1[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
3883 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm11 = ymm11[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3884 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
3885 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11]
3886 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm1[2,3,2,3,6,7,6,7]
3887 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm5[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
3888 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm5[2,1,2,3]
3889 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
3890 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3891 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm4, %zmm0, %zmm3 {%k1}
3892 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm2[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3893 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
3894 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm3, %ymm2
3895 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2],ymm2[3,4],ymm0[5],ymm2[6,7]
3896 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm2
3897 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
3898 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm17, %xmm4
3899 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
3900 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
3901 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3],ymm4[4],ymm3[5,6],ymm4[7]
3902 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm3[0,1,2,3],zmm2[4,5,6,7]
3903 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm16[2,3,2,3]
3904 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,1,4,5,6,7]
3905 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
3906 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3907 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
3908 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm27, %xmm5
3909 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm4 # 16-byte Folded Reload
3910 ; AVX512DQ-SLOW-NEXT: # xmm4 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
3911 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm14[0],ymm13[0],ymm14[1],ymm13[1],ymm14[2],ymm13[2],ymm14[3],ymm13[3],ymm14[8],ymm13[8],ymm14[9],ymm13[9],ymm14[10],ymm13[10],ymm14[11],ymm13[11]
3912 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm4, %zmm10, %zmm5
3913 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm28, %xmm4
3914 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm29, %xmm10
3915 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
3916 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[1,1,1,1]
3917 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
3918 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
3919 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm7, %zmm4, %zmm5 {%k1}
3920 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm5, %ymm4
3921 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3922 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,2]
3923 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm6[2],ymm4[3,4],ymm6[5],ymm4[6,7]
3924 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm4
3925 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm31, %xmm6
3926 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm0
3927 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
3928 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2,3],ymm0[4],ymm5[5,6],ymm0[7]
3929 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm4[4,5,6,7]
3930 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm30[2,3,2,3]
3931 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,2,2,1,4,5,6,7]
3932 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
3933 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm9[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
3934 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,2]
3935 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3936 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm26, %zmm6
3937 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
3938 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm6 # 64-byte Folded Reload
3939 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm22, %zmm8
3940 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm21, %zmm7, %zmm8
3941 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm25, %zmm24, %zmm7
3942 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
3943 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm20, %zmm9, %zmm7
3944 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm10
3945 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm23, %zmm9, %zmm10
3946 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
3947 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
3948 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm3, %zmm1
3949 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm2
3950 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm2
3951 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 256(%rax)
3952 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 64(%rax)
3953 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
3954 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 192(%rax)
3955 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 128(%rax)
3956 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 320(%rax)
3957 ; AVX512DQ-SLOW-NEXT: vzeroupper
3958 ; AVX512DQ-SLOW-NEXT: retq
3959 ;
3960 ; AVX512DQ-FAST-LABEL: store_i16_stride6_vf32:
3961 ; AVX512DQ-FAST: # %bb.0:
3962 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm14
3963 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm1
3964 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm14[0],ymm1[1],ymm14[1],ymm1[2],ymm14[2],ymm1[3],ymm14[3],ymm1[8],ymm14[8],ymm1[9],ymm14[9],ymm1[10],ymm14[10],ymm1[11],ymm14[11]
3965 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm25
3966 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm2
3967 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3968 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm1
3969 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3970 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
3971 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [1,1,1,1,10,10,10,11]
3972 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm20, %zmm1
3973 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm3
3974 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm2
3975 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11]
3976 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm3, %ymm27
3977 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm26
3978 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm3
3979 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3980 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %xmm5
3981 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm2
3982 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3983 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %xmm6
3984 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3985 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm2
3986 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = <1,2,1,2,u,u,3,3,13,12,10,10,13,12,14,14>
3987 ; AVX512DQ-FAST-NEXT: vpermd %zmm2, %zmm21, %zmm19
3988 ; AVX512DQ-FAST-NEXT: movw $18724, %ax # imm = 0x4924
3989 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
3990 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm1, %zmm19 {%k1}
3991 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm17 = [8,9,20,11,12,21,14,15]
3992 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm0
3993 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm0[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
3994 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm0, %ymm24
3995 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm19, %zmm23
3996 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm1, %zmm17, %zmm23
3997 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm0
3998 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %ymm9
3999 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %ymm4
4000 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm4[0],ymm9[0],ymm4[1],ymm9[1],ymm4[2],ymm9[2],ymm4[3],ymm9[3],ymm4[8],ymm9[8],ymm4[9],ymm9[9],ymm4[10],ymm9[10],ymm4[11],ymm9[11]
4001 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
4002 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm6, %xmm30
4003 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm5, %xmm29
4004 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm3
4005 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
4006 ; AVX512DQ-FAST-NEXT: vpshufb %xmm5, %xmm0, %xmm1
4007 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm0, %xmm28
4008 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,9,2,3,8,5,6,11]
4009 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm1, %ymm6, %ymm19
4010 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm7
4011 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = <0,u,0,1,u,10,10,u>
4012 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm7[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
4013 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm0
4014 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4015 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
4016 ; AVX512DQ-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm12
4017 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm12
4018 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %xmm10
4019 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %xmm8
4020 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %ymm2
4021 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %ymm1
4022 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm11 = ymm1[0],ymm2[0],ymm1[1],ymm2[1],ymm1[2],ymm2[2],ymm1[3],ymm2[3],ymm1[8],ymm2[8],ymm1[9],ymm2[9],ymm1[10],ymm2[10],ymm1[11],ymm2[11]
4023 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm10[4],xmm8[5],xmm10[5],xmm8[6],xmm10[6],xmm8[7],xmm10[7]
4024 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm20, %zmm0
4025 ; AVX512DQ-FAST-NEXT: vpermd %zmm3, %zmm21, %zmm20
4026 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm0, %zmm20 {%k1}
4027 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %ymm3
4028 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm3[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
4029 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm0, %zmm20, %zmm17
4030 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %xmm11
4031 ; AVX512DQ-FAST-NEXT: vpshufb %xmm5, %xmm11, %xmm0
4032 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm0, %ymm6, %ymm20
4033 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %xmm0
4034 ; AVX512DQ-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm13
4035 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm0, %xmm31
4036 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %ymm5
4037 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm5[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
4038 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm18, %zmm13
4039 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
4040 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm0
4041 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm15
4042 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[8],ymm0[8],ymm15[9],ymm0[9],ymm15[10],ymm0[10],ymm15[11],ymm0[11]
4043 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm2[4],ymm1[5],ymm2[5],ymm1[6],ymm2[6],ymm1[7],ymm2[7],ymm1[12],ymm2[12],ymm1[13],ymm2[13],ymm1[14],ymm2[14],ymm1[15],ymm2[15]
4044 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,1,2,3,11,11,11,11]
4045 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm0
4046 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm21 = [5,6,5,6,5,6,7,7]
4047 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm4[4],ymm9[4],ymm4[5],ymm9[5],ymm4[6],ymm9[6],ymm4[7],ymm9[7],ymm4[12],ymm9[12],ymm4[13],ymm9[13],ymm4[14],ymm9[14],ymm4[15],ymm9[15]
4048 ; AVX512DQ-FAST-NEXT: vpermd %ymm1, %ymm21, %ymm1
4049 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm9 = ymm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm9[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
4050 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
4051 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0],ymm9[0],ymm4[1],ymm9[1],ymm4[2],ymm9[2],ymm4[3],ymm9[3],ymm4[8],ymm9[8],ymm4[9],ymm9[9],ymm4[10],ymm9[10],ymm4[11],ymm9[11]
4052 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
4053 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm1, %zmm4, %zmm0 {%k1}
4054 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [8,21,10,11,20,13,14,23]
4055 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm9
4056 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
4057 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm3, %ymm15
4058 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm16
4059 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm15, %zmm4, %zmm9
4060 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
4061 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm22 = [12,1,2,13,4,5,14,7]
4062 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm3, %ymm22, %ymm0
4063 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm18 = zmm0[0,1,2,3],zmm9[0,1,2,3]
4064 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm14, %ymm0
4065 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm25, %ymm9
4066 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm9, %ymm3
4067 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm0[0],ymm3[1],ymm0[1],ymm3[2],ymm0[2],ymm3[3],ymm0[3],ymm3[8],ymm0[8],ymm3[9],ymm0[9],ymm3[10],ymm0[10],ymm3[11],ymm0[11]
4068 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm9[4],ymm14[4],ymm9[5],ymm14[5],ymm9[6],ymm14[6],ymm9[7],ymm14[7],ymm9[12],ymm14[12],ymm9[13],ymm14[13],ymm9[14],ymm14[14],ymm9[15],ymm14[15]
4069 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm2, %zmm0
4070 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
4071 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm5, %ymm6
4072 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
4073 ; AVX512DQ-FAST-NEXT: # ymm9 = mem[0,1,0,1]
4074 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm5, %ymm2
4075 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = <2,2,u,3,10,u,10,11>
4076 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm5, %zmm2
4077 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm27, %ymm14
4078 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm26, %ymm1
4079 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm6 = ymm1[4],ymm14[4],ymm1[5],ymm14[5],ymm1[6],ymm14[6],ymm1[7],ymm14[7],ymm1[12],ymm14[12],ymm1[13],ymm14[13],ymm1[14],ymm14[14],ymm1[15],ymm14[15]
4080 ; AVX512DQ-FAST-NEXT: vpermd %ymm6, %ymm21, %ymm6
4081 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm15 = ymm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm14[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
4082 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm14 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
4083 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm14 = ymm14[0],ymm15[0],ymm14[1],ymm15[1],ymm14[2],ymm15[2],ymm14[3],ymm15[3],ymm14[8],ymm15[8],ymm14[9],ymm15[9],ymm14[10],ymm15[10],ymm14[11],ymm15[11]
4084 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,2]
4085 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm6, %zmm14, %zmm0 {%k1}
4086 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm24, %ymm6
4087 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm16, %ymm1
4088 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm6, %ymm1
4089 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm1, %zmm0, %zmm4
4090 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
4091 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm1, %ymm22, %ymm0
4092 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[0,1,2,3],zmm4[0,1,2,3]
4093 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm7, %ymm3
4094 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm7, %ymm0
4095 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm5, %zmm0
4096 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm7 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
4097 ; AVX512DQ-FAST-NEXT: vpshufb %xmm7, %xmm10, %xmm3
4098 ; AVX512DQ-FAST-NEXT: vpshufb %xmm7, %xmm8, %xmm4
4099 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
4100 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
4101 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,0,2,1,8,9,8,9]
4102 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm5, %zmm3
4103 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [1,0,2,2,1,0,2,2]
4104 ; AVX512DQ-FAST-NEXT: # ymm4 = mem[0,1,0,1]
4105 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm29, %xmm8
4106 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm30, %xmm9
4107 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
4108 ; AVX512DQ-FAST-NEXT: vpermd %ymm6, %ymm4, %ymm6
4109 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm8 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4110 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm9 = xmm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4111 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
4112 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm8, %ymm8
4113 ; AVX512DQ-FAST-NEXT: movw $9362, %ax # imm = 0x2492
4114 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
4115 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm8, %zmm6, %zmm3 {%k1}
4116 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [16,9,10,17,12,13,18,15]
4117 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm8 = xmm11[2,1,3,3,4,5,6,7]
4118 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm9
4119 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm8, %zmm6, %zmm9
4120 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm8 = xmm11[0],zero,xmm11[1],zero,xmm11[2],zero,xmm11[3],zero
4121 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4122 ; AVX512DQ-FAST-NEXT: vpshufb %xmm7, %xmm11, %xmm10
4123 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4124 ; AVX512DQ-FAST-NEXT: vpshufb %xmm7, %xmm14, %xmm7
4125 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm7[4],xmm10[4],xmm7[5],xmm10[5],xmm7[6],xmm10[6],xmm7[7],xmm10[7]
4126 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm14[0],xmm11[0],xmm14[1],xmm11[1],xmm14[2],xmm11[2],xmm14[3],xmm11[3]
4127 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm5, %zmm10
4128 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,8,3,4,9,6,7]
4129 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm8, %ymm5, %ymm3
4130 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm9[0,1,2,3]
4131 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4132 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4133 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
4134 ; AVX512DQ-FAST-NEXT: vpermd %ymm7, %ymm4, %ymm4
4135 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm7 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4136 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm8 = xmm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4137 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
4138 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
4139 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm7, %ymm7
4140 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm7, %zmm4, %zmm10 {%k1}
4141 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm31, %xmm7
4142 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm7, %xmm4
4143 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,0,2,1,4,5,6,7]
4144 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,0,0,u,8,8,u,9>
4145 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm7
4146 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm28, %xmm11
4147 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm11[2,1,3,3,4,5,6,7]
4148 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm4, %zmm10, %zmm6
4149 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm4 = xmm28[0],zero,xmm28[1],zero,xmm28[2],zero,xmm28[3],zero
4150 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm4, %ymm5, %ymm10
4151 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm10[0,1,2,3],zmm6[0,1,2,3]
4152 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4153 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm6, %xmm5
4154 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,0,2,1,4,5,6,7]
4155 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm6
4156 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
4157 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm4, %zmm5, %zmm6
4158 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4159 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, (%rax)
4160 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm3, %zmm5, %zmm7
4161 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 192(%rax)
4162 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
4163 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm1, %zmm3, %zmm0
4164 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 128(%rax)
4165 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm18, %zmm3, %zmm2
4166 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
4167 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm20[0,1,2,3],zmm17[0,1,2,3]
4168 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
4169 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm13
4170 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, 256(%rax)
4171 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm19[0,1,2,3],zmm23[0,1,2,3]
4172 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm12
4173 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 64(%rax)
4174 ; AVX512DQ-FAST-NEXT: vzeroupper
4175 ; AVX512DQ-FAST-NEXT: retq
4176 ;
4177 ; AVX512BW-LABEL: store_i16_stride6_vf32:
4178 ; AVX512BW: # %bb.0:
4179 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4180 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm2
4181 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm3
4182 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm4
4183 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm5
4184 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
4185 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm0
4186 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,0,0,32,3,35,0,0,1,33,4,36,0,0,2,34,0,0,0,32,3,35,0,0,1,33,4,36,0,0,2,34]
4187 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4188 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm6
4189 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,32,3,35,0,0,1,33,4,36,0,0,2,34,5,37,0,32,3,35,0,0,1,33,4,36,0,0,2,34,5,37]
4190 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4191 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm7
4192 ; AVX512BW-NEXT: movw $9362, %cx # imm = 0x2492
4193 ; AVX512BW-NEXT: kmovd %ecx, %k2
4194 ; AVX512BW-NEXT: vmovdqa32 %zmm6, %zmm7 {%k2}
4195 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,1,2,3,32,u,6,7,8,9,33,u,12,13,14,15,34,u,18,19,20,21,35,u,24,25,26,27,36,u,30,31>
4196 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm7, %zmm8
4197 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [0,1,2,3,4,32,6,7,8,9,10,33,12,13,14,15,16,34,18,19,20,21,22,35,24,25,26,27,28,36,30,31]
4198 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm8, %zmm6
4199 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [8,40,0,0,6,38,9,41,0,0,7,39,10,42,0,0,8,40,0,0,6,38,9,41,0,0,7,39,10,42,0,0]
4200 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4201 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm7
4202 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [5,37,8,40,0,0,6,38,9,41,0,0,7,39,10,42,5,37,8,40,0,0,6,38,9,41,0,0,7,39,10,42]
4203 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
4204 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm8
4205 ; AVX512BW-NEXT: movw $18724, %cx # imm = 0x4924
4206 ; AVX512BW-NEXT: kmovd %ecx, %k1
4207 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm8 {%k1}
4208 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,37,u,4,5,6,7,38,u,10,11,12,13,39,u,16,17,18,19,40,u,22,23,24,25,41,u,28,29,30,31>
4209 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm8, %zmm7
4210 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,1,2,37,4,5,6,7,8,38,10,11,12,13,14,39,16,17,18,19,20,40,22,23,24,25,26,41,28,29,30,31]
4211 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm8
4212 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0]
4213 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4214 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm7
4215 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45]
4216 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
4217 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm9
4218 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm9 {%k1}
4219 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <42,u,2,3,4,5,43,u,8,9,10,11,44,u,14,15,16,17,45,u,20,21,22,23,46,u,26,27,28,29,47,u>
4220 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm9, %zmm7
4221 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,42,2,3,4,5,6,43,8,9,10,11,12,44,14,15,16,17,18,45,20,21,22,23,24,46,26,27,28,29,30,47]
4222 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm9
4223 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [0,0,16,48,19,51,0,0,17,49,20,52,0,0,18,50,0,0,16,48,19,51,0,0,17,49,20,52,0,0,18,50]
4224 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4225 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm7
4226 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [16,48,19,51,0,0,17,49,20,52,0,0,18,50,21,53,16,48,19,51,0,0,17,49,20,52,0,0,18,50,21,53]
4227 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
4228 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm10
4229 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm10 {%k2}
4230 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,2,3,48,u,6,7,8,9,49,u,12,13,14,15,50,u,18,19,20,21,51,u,24,25,26,27,52,u,30,31>
4231 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm10, %zmm7
4232 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,2,3,4,48,6,7,8,9,10,49,12,13,14,15,16,50,18,19,20,21,22,51,24,25,26,27,28,52,30,31]
4233 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm10
4234 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [24,56,0,0,22,54,25,57,0,0,23,55,26,58,0,0,24,56,0,0,22,54,25,57,0,0,23,55,26,58,0,0]
4235 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4236 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm7
4237 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [21,53,24,56,0,0,22,54,25,57,0,0,23,55,26,58,21,53,24,56,0,0,22,54,25,57,0,0,23,55,26,58]
4238 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
4239 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm11
4240 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm11 {%k1}
4241 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,53,u,4,5,6,7,54,u,10,11,12,13,55,u,16,17,18,19,56,u,22,23,24,25,57,u,28,29,30,31>
4242 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm11, %zmm7
4243 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,1,2,53,4,5,6,7,8,54,10,11,12,13,14,55,16,17,18,19,20,56,22,23,24,25,26,57,28,29,30,31]
4244 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm11
4245 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0]
4246 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4247 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm4, %zmm7
4248 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61]
4249 ; AVX512BW-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
4250 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm4
4251 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm4 {%k1}
4252 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <58,u,2,3,4,5,59,u,8,9,10,11,60,u,14,15,16,17,61,u,20,21,22,23,62,u,26,27,28,29,63,u>
4253 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm4, %zmm2
4254 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,58,2,3,4,5,6,59,8,9,10,11,12,60,14,15,16,17,18,61,20,21,22,23,24,62,26,27,28,29,30,63]
4255 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm2, %zmm1
4256 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 320(%rax)
4257 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 256(%rax)
4258 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 192(%rax)
4259 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 128(%rax)
4260 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 64(%rax)
4261 ; AVX512BW-NEXT: vmovdqa64 %zmm6, (%rax)
4262 ; AVX512BW-NEXT: vzeroupper
4263 ; AVX512BW-NEXT: retq
4264 %in.vec0 = load <32 x i16>, ptr %in.vecptr0, align 64
4265 %in.vec1 = load <32 x i16>, ptr %in.vecptr1, align 64
4266 %in.vec2 = load <32 x i16>, ptr %in.vecptr2, align 64
4267 %in.vec3 = load <32 x i16>, ptr %in.vecptr3, align 64
4268 %in.vec4 = load <32 x i16>, ptr %in.vecptr4, align 64
4269 %in.vec5 = load <32 x i16>, ptr %in.vecptr5, align 64
4270 %1 = shufflevector <32 x i16> %in.vec0, <32 x i16> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4271 %2 = shufflevector <32 x i16> %in.vec2, <32 x i16> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4272 %3 = shufflevector <32 x i16> %in.vec4, <32 x i16> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4273 %4 = shufflevector <64 x i16> %1, <64 x i16> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
4274 %5 = shufflevector <64 x i16> %3, <64 x i16> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
4275 %6 = shufflevector <128 x i16> %4, <128 x i16> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
4276 %interleaved.vec = shufflevector <192 x i16> %6, <192 x i16> poison, <192 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191>
4277 store <192 x i16> %interleaved.vec, ptr %out.vec, align 64
4278 ret void
4279 }
4281 define void @store_i16_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
4282 ; SSE-LABEL: store_i16_stride6_vf64:
4283 ; SSE: # %bb.0:
4284 ; SSE-NEXT: subq $808, %rsp # imm = 0x328
4285 ; SSE-NEXT: movdqa (%rdi), %xmm10
4286 ; SSE-NEXT: movdqa 16(%rdi), %xmm11
4287 ; SSE-NEXT: movdqa (%rsi), %xmm4
4288 ; SSE-NEXT: movdqa 16(%rsi), %xmm1
4289 ; SSE-NEXT: movdqa (%rdx), %xmm12
4290 ; SSE-NEXT: movdqa 16(%rdx), %xmm2
4291 ; SSE-NEXT: movdqa (%rcx), %xmm6
4292 ; SSE-NEXT: movdqa 16(%rcx), %xmm3
4293 ; SSE-NEXT: movdqa (%r8), %xmm9
4294 ; SSE-NEXT: movdqa (%r9), %xmm8
4295 ; SSE-NEXT: movdqa %xmm12, %xmm0
4296 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
4297 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4298 ; SSE-NEXT: movdqa %xmm10, %xmm7
4299 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
4300 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4301 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[3,3]
4302 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm9[2,1,3,3,4,5,6,7]
4303 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[1,2],xmm5[0,1]
4304 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0,1,3]
4305 ; SSE-NEXT: movaps {{.*#+}} xmm14 = [65535,0,65535,65535,65535,65535,65535,0]
4306 ; SSE-NEXT: andps %xmm14, %xmm7
4307 ; SSE-NEXT: movdqa %xmm8, %xmm5
4308 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4309 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[0,2,2,3,4,5,6,7]
4310 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,2,1]
4311 ; SSE-NEXT: movaps %xmm14, %xmm0
4312 ; SSE-NEXT: andnps %xmm8, %xmm0
4313 ; SSE-NEXT: orps %xmm7, %xmm0
4314 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4315 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm6[4],xmm12[5],xmm6[5],xmm12[6],xmm6[6],xmm12[7],xmm6[7]
4316 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4317 ; SSE-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm4[4],xmm10[5],xmm4[5],xmm10[6],xmm4[6],xmm10[7],xmm4[7]
4318 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4319 ; SSE-NEXT: movdqa %xmm10, %xmm4
4320 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,3],xmm12[3,3]
4321 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm9[0,1,2,3,6,5,7,7]
4322 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,2],xmm6[2,3]
4323 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0,1,3]
4324 ; SSE-NEXT: andps %xmm14, %xmm4
4325 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,4,6,6,7]
4326 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,2,3]
4327 ; SSE-NEXT: movaps %xmm14, %xmm0
4328 ; SSE-NEXT: andnps %xmm6, %xmm0
4329 ; SSE-NEXT: orps %xmm4, %xmm0
4330 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4331 ; SSE-NEXT: movdqa %xmm2, %xmm0
4332 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
4333 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4334 ; SSE-NEXT: movdqa %xmm11, %xmm4
4335 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
4336 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4337 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,3],xmm0[3,3]
4338 ; SSE-NEXT: movdqa 16(%r8), %xmm0
4339 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[2,1,3,3,4,5,6,7]
4340 ; SSE-NEXT: movdqa %xmm0, %xmm5
4341 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4342 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,2],xmm6[0,1]
4343 ; SSE-NEXT: movdqa 16(%r9), %xmm0
4344 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[0,2,2,3,4,5,6,7]
4345 ; SSE-NEXT: movdqa %xmm0, %xmm7
4346 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4347 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,1,2,1]
4348 ; SSE-NEXT: movaps %xmm14, %xmm0
4349 ; SSE-NEXT: andnps %xmm6, %xmm0
4350 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0,1,3]
4351 ; SSE-NEXT: andps %xmm14, %xmm4
4352 ; SSE-NEXT: orps %xmm4, %xmm0
4353 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4354 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
4355 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4356 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm1[4],xmm11[5],xmm1[5],xmm11[6],xmm1[6],xmm11[7],xmm1[7]
4357 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4358 ; SSE-NEXT: movdqa %xmm11, %xmm1
4359 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[3,3]
4360 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,6,5,7,7]
4361 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm3[2,3]
4362 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm7[0,1,2,3,4,6,6,7]
4363 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,2,3]
4364 ; SSE-NEXT: movaps %xmm14, %xmm0
4365 ; SSE-NEXT: andnps %xmm3, %xmm0
4366 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
4367 ; SSE-NEXT: andps %xmm14, %xmm1
4368 ; SSE-NEXT: orps %xmm1, %xmm0
4369 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4370 ; SSE-NEXT: movdqa 32(%rdx), %xmm2
4371 ; SSE-NEXT: movdqa 32(%rcx), %xmm1
4372 ; SSE-NEXT: movdqa %xmm2, %xmm0
4373 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
4374 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4375 ; SSE-NEXT: movdqa 32(%rdi), %xmm3
4376 ; SSE-NEXT: movdqa 32(%rsi), %xmm6
4377 ; SSE-NEXT: movdqa %xmm3, %xmm7
4378 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4379 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4380 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm0[3,3]
4381 ; SSE-NEXT: movdqa 32(%r8), %xmm0
4382 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm0[2,1,3,3,4,5,6,7]
4383 ; SSE-NEXT: movdqa %xmm0, %xmm4
4384 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4385 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[1,2],xmm8[0,1]
4386 ; SSE-NEXT: movdqa 32(%r9), %xmm0
4387 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm0[0,2,2,3,4,5,6,7]
4388 ; SSE-NEXT: movdqa %xmm0, %xmm5
4389 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4390 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,2,1]
4391 ; SSE-NEXT: movaps %xmm14, %xmm0
4392 ; SSE-NEXT: andnps %xmm8, %xmm0
4393 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0,1,3]
4394 ; SSE-NEXT: andps %xmm14, %xmm7
4395 ; SSE-NEXT: orps %xmm7, %xmm0
4396 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4397 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4398 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4399 ; SSE-NEXT: movdqa %xmm3, %xmm1
4400 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
4401 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4402 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[3,3]
4403 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm4[0,1,2,3,6,5,7,7]
4404 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm6[2,3]
4405 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,4,6,6,7]
4406 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,2,3]
4407 ; SSE-NEXT: movaps %xmm14, %xmm0
4408 ; SSE-NEXT: andnps %xmm6, %xmm0
4409 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
4410 ; SSE-NEXT: andps %xmm14, %xmm1
4411 ; SSE-NEXT: orps %xmm1, %xmm0
4412 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4413 ; SSE-NEXT: movdqa 48(%rdx), %xmm2
4414 ; SSE-NEXT: movdqa 48(%rcx), %xmm1
4415 ; SSE-NEXT: movdqa %xmm2, %xmm0
4416 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
4417 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4418 ; SSE-NEXT: movdqa 48(%rdi), %xmm3
4419 ; SSE-NEXT: movdqa 48(%rsi), %xmm7
4420 ; SSE-NEXT: movdqa %xmm3, %xmm8
4421 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
4422 ; SSE-NEXT: movdqa %xmm8, (%rsp) # 16-byte Spill
4423 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,3],xmm0[3,3]
4424 ; SSE-NEXT: movdqa 48(%r8), %xmm6
4425 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm6[2,1,3,3,4,5,6,7]
4426 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,2],xmm11[0,1]
4427 ; SSE-NEXT: movdqa 48(%r9), %xmm0
4428 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm0[0,2,2,3,4,5,6,7]
4429 ; SSE-NEXT: movdqa %xmm0, %xmm4
4430 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4431 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,1,2,1]
4432 ; SSE-NEXT: movaps %xmm14, %xmm0
4433 ; SSE-NEXT: andnps %xmm11, %xmm0
4434 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0,1,3]
4435 ; SSE-NEXT: andps %xmm14, %xmm8
4436 ; SSE-NEXT: orps %xmm8, %xmm0
4437 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4438 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4439 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4440 ; SSE-NEXT: movdqa %xmm3, %xmm1
4441 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
4442 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4443 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[3,3]
4444 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm6[0,1,2,3,6,5,7,7]
4445 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm7[2,3]
4446 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm4[0,1,2,3,4,6,6,7]
4447 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,1,2,3]
4448 ; SSE-NEXT: movaps %xmm14, %xmm0
4449 ; SSE-NEXT: andnps %xmm7, %xmm0
4450 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
4451 ; SSE-NEXT: andps %xmm14, %xmm1
4452 ; SSE-NEXT: orps %xmm1, %xmm0
4453 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4454 ; SSE-NEXT: movdqa 64(%rdx), %xmm2
4455 ; SSE-NEXT: movdqa 64(%rcx), %xmm1
4456 ; SSE-NEXT: movdqa %xmm2, %xmm0
4457 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
4458 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4459 ; SSE-NEXT: movdqa 64(%rdi), %xmm3
4460 ; SSE-NEXT: movdqa 64(%rsi), %xmm8
4461 ; SSE-NEXT: movdqa %xmm3, %xmm11
4462 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3]
4463 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4464 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,3],xmm0[3,3]
4465 ; SSE-NEXT: movdqa 64(%r8), %xmm7
4466 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm7[2,1,3,3,4,5,6,7]
4467 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,2],xmm12[0,1]
4468 ; SSE-NEXT: movdqa 64(%r9), %xmm0
4469 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm0[0,2,2,3,4,5,6,7]
4470 ; SSE-NEXT: movdqa %xmm0, %xmm4
4471 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4472 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm12[0,1,2,1]
4473 ; SSE-NEXT: movaps %xmm14, %xmm0
4474 ; SSE-NEXT: andnps %xmm12, %xmm0
4475 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0,1,3]
4476 ; SSE-NEXT: andps %xmm14, %xmm11
4477 ; SSE-NEXT: orps %xmm11, %xmm0
4478 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4479 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4480 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4481 ; SSE-NEXT: movdqa %xmm3, %xmm1
4482 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm8[4],xmm1[5],xmm8[5],xmm1[6],xmm8[6],xmm1[7],xmm8[7]
4483 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4484 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[3,3]
4485 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm7[0,1,2,3,6,5,7,7]
4486 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm8[2,3]
4487 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm4[0,1,2,3,4,6,6,7]
4488 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,1,2,3]
4489 ; SSE-NEXT: movaps %xmm14, %xmm0
4490 ; SSE-NEXT: andnps %xmm8, %xmm0
4491 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
4492 ; SSE-NEXT: andps %xmm14, %xmm1
4493 ; SSE-NEXT: orps %xmm1, %xmm0
4494 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4495 ; SSE-NEXT: movdqa 80(%rdx), %xmm2
4496 ; SSE-NEXT: movdqa 80(%rcx), %xmm1
4497 ; SSE-NEXT: movdqa %xmm2, %xmm0
4498 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
4499 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4500 ; SSE-NEXT: movdqa 80(%rdi), %xmm3
4501 ; SSE-NEXT: movdqa 80(%rsi), %xmm11
4502 ; SSE-NEXT: movdqa %xmm3, %xmm12
4503 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
4504 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4505 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,3],xmm0[3,3]
4506 ; SSE-NEXT: movdqa 80(%r8), %xmm8
4507 ; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm8[2,1,3,3,4,5,6,7]
4508 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,2],xmm15[0,1]
4509 ; SSE-NEXT: movdqa 80(%r9), %xmm0
4510 ; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm0[0,2,2,3,4,5,6,7]
4511 ; SSE-NEXT: movdqa %xmm0, %xmm4
4512 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4513 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm15[0,1,2,1]
4514 ; SSE-NEXT: movaps %xmm14, %xmm0
4515 ; SSE-NEXT: andnps %xmm15, %xmm0
4516 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0,1,3]
4517 ; SSE-NEXT: andps %xmm14, %xmm12
4518 ; SSE-NEXT: orps %xmm12, %xmm0
4519 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4520 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4521 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4522 ; SSE-NEXT: movdqa %xmm3, %xmm1
4523 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm11[4],xmm1[5],xmm11[5],xmm1[6],xmm11[6],xmm1[7],xmm11[7]
4524 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4525 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,3],xmm2[3,3]
4526 ; SSE-NEXT: pshufhw {{.*#+}} xmm11 = xmm8[0,1,2,3,6,5,7,7]
4527 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm11[2,3]
4528 ; SSE-NEXT: pshufhw {{.*#+}} xmm11 = xmm4[0,1,2,3,4,6,6,7]
4529 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[2,1,2,3]
4530 ; SSE-NEXT: movaps %xmm14, %xmm0
4531 ; SSE-NEXT: andnps %xmm11, %xmm0
4532 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
4533 ; SSE-NEXT: andps %xmm14, %xmm1
4534 ; SSE-NEXT: orps %xmm1, %xmm0
4535 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4536 ; SSE-NEXT: movdqa 96(%rdx), %xmm2
4537 ; SSE-NEXT: movdqa 96(%rcx), %xmm1
4538 ; SSE-NEXT: movdqa %xmm2, %xmm0
4539 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
4540 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4541 ; SSE-NEXT: movdqa 96(%rdi), %xmm3
4542 ; SSE-NEXT: movdqa 96(%rsi), %xmm12
4543 ; SSE-NEXT: movdqa %xmm3, %xmm15
4544 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3]
4545 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4546 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,3],xmm0[3,3]
4547 ; SSE-NEXT: movdqa 96(%r8), %xmm11
4548 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[2,1,3,3,4,5,6,7]
4549 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[1,2],xmm0[0,1]
4550 ; SSE-NEXT: movdqa 96(%r9), %xmm10
4551 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm10[0,2,2,3,4,5,6,7]
4552 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4553 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,2,1]
4554 ; SSE-NEXT: movaps %xmm14, %xmm13
4555 ; SSE-NEXT: andnps %xmm0, %xmm13
4556 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,0,1,3]
4557 ; SSE-NEXT: andps %xmm14, %xmm15
4558 ; SSE-NEXT: orps %xmm15, %xmm13
4559 ; SSE-NEXT: movaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4560 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4561 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4562 ; SSE-NEXT: movdqa %xmm3, %xmm0
4563 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm12[4],xmm0[5],xmm12[5],xmm0[6],xmm12[6],xmm0[7],xmm12[7]
4564 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4565 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm2[3,3]
4566 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,6,5,7,7]
4567 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[2,3]
4568 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm10[0,1,2,3,4,6,6,7]
4569 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
4570 ; SSE-NEXT: movaps %xmm14, %xmm12
4571 ; SSE-NEXT: andnps %xmm1, %xmm12
4572 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
4573 ; SSE-NEXT: andps %xmm14, %xmm0
4574 ; SSE-NEXT: orps %xmm0, %xmm12
4575 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4576 ; SSE-NEXT: movdqa 112(%rdx), %xmm4
4577 ; SSE-NEXT: movdqa 112(%rcx), %xmm5
4578 ; SSE-NEXT: movdqa %xmm4, %xmm2
4579 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
4580 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4581 ; SSE-NEXT: movdqa 112(%rdi), %xmm0
4582 ; SSE-NEXT: movdqa 112(%rsi), %xmm1
4583 ; SSE-NEXT: movdqa %xmm0, %xmm15
4584 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm1[0],xmm15[1],xmm1[1],xmm15[2],xmm1[2],xmm15[3],xmm1[3]
4585 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4586 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,3],xmm2[3,3]
4587 ; SSE-NEXT: movdqa 112(%r8), %xmm3
4588 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm3[2,1,3,3,4,5,6,7]
4589 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4590 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[1,2],xmm13[0,1]
4591 ; SSE-NEXT: movdqa 112(%r9), %xmm2
4592 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm2[0,2,2,3,4,5,6,7]
4593 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm13[0,1,2,1]
4594 ; SSE-NEXT: movaps %xmm14, %xmm12
4595 ; SSE-NEXT: andnps %xmm13, %xmm12
4596 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,0,1,3]
4597 ; SSE-NEXT: andps %xmm14, %xmm15
4598 ; SSE-NEXT: orps %xmm15, %xmm12
4599 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4600 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
4601 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4602 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
4603 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4604 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,3],xmm4[3,3]
4605 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,6,5,7,7]
4606 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[2,3]
4607 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
4608 ; SSE-NEXT: andps %xmm14, %xmm0
4609 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,6,6,7]
4610 ; SSE-NEXT: movdqa %xmm2, %xmm15
4611 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
4612 ; SSE-NEXT: andnps %xmm1, %xmm14
4613 ; SSE-NEXT: orps %xmm0, %xmm14
4614 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4615 ; SSE-NEXT: movaps %xmm4, %xmm0
4616 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4617 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4618 ; SSE-NEXT: movdqa %xmm9, %xmm1
4619 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm2[1,3]
4620 ; SSE-NEXT: movaps %xmm2, %xmm3
4621 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm1[0,2]
4622 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4623 ; SSE-NEXT: movdqa %xmm2, %xmm13
4624 ; SSE-NEXT: pslldq {{.*#+}} xmm13 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm13[0,1,2,3,4,5]
4625 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,65535,65535,65535,65535,0,65535,65535]
4626 ; SSE-NEXT: movdqa %xmm12, %xmm1
4627 ; SSE-NEXT: pandn %xmm13, %xmm1
4628 ; SSE-NEXT: andps %xmm12, %xmm0
4629 ; SSE-NEXT: por %xmm0, %xmm1
4630 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4631 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4632 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[1,1,1,1,4,5,6,7]
4633 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
4634 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm3[0,2]
4635 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,65535,65535,0,65535,65535,65535,65535]
4636 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm2[0,0,1,1]
4637 ; SSE-NEXT: movdqa %xmm10, %xmm1
4638 ; SSE-NEXT: pandn %xmm13, %xmm1
4639 ; SSE-NEXT: andps %xmm10, %xmm0
4640 ; SSE-NEXT: por %xmm0, %xmm1
4641 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4643 ; SSE-NEXT: movaps %xmm4, %xmm0
4644 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4645 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4646 ; SSE-NEXT: movdqa %xmm9, %xmm13
4647 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,1],xmm1[1,3]
4648 ; SSE-NEXT: movaps %xmm1, %xmm3
4649 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm13[0,2]
4650 ; SSE-NEXT: movdqa %xmm2, %xmm1
4651 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm2[2,2,3,3]
4652 ; SSE-NEXT: pslld $16, %xmm1
4653 ; SSE-NEXT: movdqa %xmm1, %xmm2
4654 ; SSE-NEXT: movdqa %xmm12, %xmm1
4655 ; SSE-NEXT: pandn %xmm2, %xmm1
4656 ; SSE-NEXT: andps %xmm12, %xmm0
4657 ; SSE-NEXT: por %xmm0, %xmm1
4658 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4659 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4660 ; SSE-NEXT: psrldq {{.*#+}} xmm9 = xmm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4661 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[1,1],xmm4[1,1]
4662 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,0],xmm3[0,2]
4663 ; SSE-NEXT: movdqa %xmm10, %xmm0
4664 ; SSE-NEXT: pandn %xmm13, %xmm0
4665 ; SSE-NEXT: andps %xmm10, %xmm9
4666 ; SSE-NEXT: por %xmm9, %xmm0
4667 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4668 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4669 ; SSE-NEXT: movaps %xmm9, %xmm0
4670 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4671 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4672 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4673 ; SSE-NEXT: movaps %xmm3, %xmm2
4674 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[1,3]
4675 ; SSE-NEXT: movaps %xmm1, %xmm5
4676 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4677 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4678 ; SSE-NEXT: movdqa %xmm4, %xmm2
4679 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4680 ; SSE-NEXT: movdqa %xmm12, %xmm1
4681 ; SSE-NEXT: pandn %xmm2, %xmm1
4682 ; SSE-NEXT: andps %xmm12, %xmm0
4683 ; SSE-NEXT: por %xmm0, %xmm1
4684 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4685 ; SSE-NEXT: unpckhpd {{.*#+}} xmm5 = xmm5[1],xmm9[1]
4686 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[1,1,1,1,4,5,6,7]
4687 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm9[1,1]
4688 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm5[0,2]
4689 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,0,1,1]
4690 ; SSE-NEXT: movdqa %xmm4, %xmm5
4691 ; SSE-NEXT: movdqa %xmm10, %xmm1
4692 ; SSE-NEXT: pandn %xmm2, %xmm1
4693 ; SSE-NEXT: andps %xmm10, %xmm0
4694 ; SSE-NEXT: por %xmm0, %xmm1
4695 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4697 ; SSE-NEXT: movaps %xmm9, %xmm0
4698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4699 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4700 ; SSE-NEXT: movaps %xmm3, %xmm2
4701 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm1[1,3]
4702 ; SSE-NEXT: movaps %xmm1, %xmm4
4703 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4704 ; SSE-NEXT: movdqa %xmm5, %xmm1
4705 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[2,2,3,3]
4706 ; SSE-NEXT: pslld $16, %xmm1
4707 ; SSE-NEXT: movdqa %xmm12, %xmm5
4708 ; SSE-NEXT: pandn %xmm1, %xmm5
4709 ; SSE-NEXT: andps %xmm12, %xmm0
4710 ; SSE-NEXT: por %xmm0, %xmm5
4711 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4712 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm9[1]
4713 ; SSE-NEXT: movdqa %xmm3, %xmm0
4714 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4715 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm9[1,1]
4716 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm4[0,2]
4717 ; SSE-NEXT: movdqa %xmm10, %xmm1
4718 ; SSE-NEXT: pandn %xmm2, %xmm1
4719 ; SSE-NEXT: andps %xmm10, %xmm0
4720 ; SSE-NEXT: por %xmm0, %xmm1
4721 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4723 ; SSE-NEXT: movaps %xmm5, %xmm0
4724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4725 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
4726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4727 ; SSE-NEXT: movaps %xmm1, %xmm2
4728 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm3[1,3]
4729 ; SSE-NEXT: movaps %xmm3, %xmm4
4730 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4731 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4732 ; SSE-NEXT: movdqa %xmm3, %xmm2
4733 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4734 ; SSE-NEXT: movdqa %xmm12, %xmm9
4735 ; SSE-NEXT: pandn %xmm2, %xmm9
4736 ; SSE-NEXT: andps %xmm12, %xmm0
4737 ; SSE-NEXT: por %xmm0, %xmm9
4738 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4739 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm5[1]
4740 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[1,1,1,1,4,5,6,7]
4741 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm5[1,1]
4742 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm4[0,2]
4743 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,0,1,1]
4744 ; SSE-NEXT: movdqa %xmm10, %xmm4
4745 ; SSE-NEXT: pandn %xmm2, %xmm4
4746 ; SSE-NEXT: andps %xmm10, %xmm0
4747 ; SSE-NEXT: por %xmm0, %xmm4
4748 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4749 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4750 ; SSE-NEXT: movaps %xmm5, %xmm0
4751 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4752 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm4[0]
4753 ; SSE-NEXT: movaps %xmm1, %xmm2
4754 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm4[1,3]
4755 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4756 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
4757 ; SSE-NEXT: pslld $16, %xmm3
4758 ; SSE-NEXT: movdqa %xmm12, %xmm9
4759 ; SSE-NEXT: pandn %xmm3, %xmm9
4760 ; SSE-NEXT: andps %xmm12, %xmm0
4761 ; SSE-NEXT: por %xmm0, %xmm9
4762 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4763 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm5[1]
4764 ; SSE-NEXT: movdqa %xmm1, %xmm0
4765 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4766 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm5[1,1]
4767 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm4[0,2]
4768 ; SSE-NEXT: movdqa %xmm10, %xmm1
4769 ; SSE-NEXT: pandn %xmm2, %xmm1
4770 ; SSE-NEXT: andps %xmm10, %xmm0
4771 ; SSE-NEXT: por %xmm0, %xmm1
4772 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4773 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4774 ; SSE-NEXT: movaps %xmm4, %xmm0
4775 ; SSE-NEXT: movaps (%rsp), %xmm1 # 16-byte Reload
4776 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4777 ; SSE-NEXT: movdqa %xmm6, %xmm2
4778 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[1,3]
4779 ; SSE-NEXT: movaps %xmm1, %xmm3
4780 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4781 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4782 ; SSE-NEXT: movdqa %xmm1, %xmm2
4783 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4784 ; SSE-NEXT: movdqa %xmm12, %xmm5
4785 ; SSE-NEXT: pandn %xmm2, %xmm5
4786 ; SSE-NEXT: andps %xmm12, %xmm0
4787 ; SSE-NEXT: por %xmm0, %xmm5
4788 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4789 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4790 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm6[1,1,1,1,4,5,6,7]
4791 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
4792 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm3[0,2]
4793 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
4794 ; SSE-NEXT: movdqa %xmm10, %xmm3
4795 ; SSE-NEXT: pandn %xmm2, %xmm3
4796 ; SSE-NEXT: andps %xmm10, %xmm0
4797 ; SSE-NEXT: por %xmm0, %xmm3
4798 ; SSE-NEXT: movdqa %xmm3, (%rsp) # 16-byte Spill
4799 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4800 ; SSE-NEXT: movaps %xmm4, %xmm0
4801 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4802 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
4803 ; SSE-NEXT: movdqa %xmm6, %xmm2
4804 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm3[1,3]
4805 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4806 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,2,3,3]
4807 ; SSE-NEXT: pslld $16, %xmm1
4808 ; SSE-NEXT: movdqa %xmm12, %xmm5
4809 ; SSE-NEXT: pandn %xmm1, %xmm5
4810 ; SSE-NEXT: andps %xmm12, %xmm0
4811 ; SSE-NEXT: por %xmm0, %xmm5
4812 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4813 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4814 ; SSE-NEXT: psrldq {{.*#+}} xmm6 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4815 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[1,1],xmm4[1,1]
4816 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm3[0,2]
4817 ; SSE-NEXT: movdqa %xmm10, %xmm0
4818 ; SSE-NEXT: pandn %xmm2, %xmm0
4819 ; SSE-NEXT: andps %xmm10, %xmm6
4820 ; SSE-NEXT: por %xmm6, %xmm0
4821 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4822 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4823 ; SSE-NEXT: movaps %xmm4, %xmm0
4824 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4825 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4826 ; SSE-NEXT: movdqa %xmm7, %xmm2
4827 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[1,3]
4828 ; SSE-NEXT: movaps %xmm1, %xmm3
4829 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4830 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4831 ; SSE-NEXT: movdqa %xmm1, %xmm2
4832 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4833 ; SSE-NEXT: movdqa %xmm12, %xmm5
4834 ; SSE-NEXT: pandn %xmm2, %xmm5
4835 ; SSE-NEXT: andps %xmm12, %xmm0
4836 ; SSE-NEXT: por %xmm0, %xmm5
4837 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4838 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4839 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[1,1,1,1,4,5,6,7]
4840 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
4841 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm3[0,2]
4842 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
4843 ; SSE-NEXT: movdqa %xmm10, %xmm3
4844 ; SSE-NEXT: pandn %xmm2, %xmm3
4845 ; SSE-NEXT: andps %xmm10, %xmm0
4846 ; SSE-NEXT: por %xmm0, %xmm3
4847 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4848 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4849 ; SSE-NEXT: movaps %xmm4, %xmm0
4850 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4851 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
4852 ; SSE-NEXT: movdqa %xmm7, %xmm2
4853 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm3[1,3]
4854 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4855 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,2,3,3]
4856 ; SSE-NEXT: pslld $16, %xmm1
4857 ; SSE-NEXT: movdqa %xmm12, %xmm5
4858 ; SSE-NEXT: pandn %xmm1, %xmm5
4859 ; SSE-NEXT: andps %xmm12, %xmm0
4860 ; SSE-NEXT: por %xmm0, %xmm5
4861 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4862 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4863 ; SSE-NEXT: psrldq {{.*#+}} xmm7 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4864 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[1,1],xmm4[1,1]
4865 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0],xmm3[0,2]
4866 ; SSE-NEXT: movdqa %xmm10, %xmm0
4867 ; SSE-NEXT: pandn %xmm2, %xmm0
4868 ; SSE-NEXT: andps %xmm10, %xmm7
4869 ; SSE-NEXT: por %xmm7, %xmm0
4870 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4871 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4872 ; SSE-NEXT: movaps %xmm4, %xmm0
4873 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4874 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4875 ; SSE-NEXT: movdqa %xmm8, %xmm2
4876 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[1,3]
4877 ; SSE-NEXT: movaps %xmm1, %xmm3
4878 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4879 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4880 ; SSE-NEXT: movdqa %xmm1, %xmm2
4881 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4882 ; SSE-NEXT: movdqa %xmm12, %xmm5
4883 ; SSE-NEXT: pandn %xmm2, %xmm5
4884 ; SSE-NEXT: andps %xmm12, %xmm0
4885 ; SSE-NEXT: por %xmm0, %xmm5
4886 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4887 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4888 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[1,1,1,1,4,5,6,7]
4889 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
4890 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm3[0,2]
4891 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
4892 ; SSE-NEXT: movdqa %xmm10, %xmm3
4893 ; SSE-NEXT: pandn %xmm2, %xmm3
4894 ; SSE-NEXT: andps %xmm10, %xmm0
4895 ; SSE-NEXT: por %xmm0, %xmm3
4896 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4897 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4898 ; SSE-NEXT: movaps %xmm4, %xmm0
4899 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4900 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm3[0]
4901 ; SSE-NEXT: movdqa %xmm8, %xmm2
4902 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm3[1,3]
4903 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4904 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,2,3,3]
4905 ; SSE-NEXT: pslld $16, %xmm1
4906 ; SSE-NEXT: movdqa %xmm12, %xmm5
4907 ; SSE-NEXT: pandn %xmm1, %xmm5
4908 ; SSE-NEXT: andps %xmm12, %xmm0
4909 ; SSE-NEXT: por %xmm0, %xmm5
4910 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4911 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4912 ; SSE-NEXT: psrldq {{.*#+}} xmm8 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4913 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm4[1,1]
4914 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm3[0,2]
4915 ; SSE-NEXT: movdqa %xmm10, %xmm0
4916 ; SSE-NEXT: pandn %xmm2, %xmm0
4917 ; SSE-NEXT: andps %xmm10, %xmm8
4918 ; SSE-NEXT: por %xmm8, %xmm0
4919 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4920 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4921 ; SSE-NEXT: movaps %xmm4, %xmm0
4922 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4923 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4924 ; SSE-NEXT: movdqa %xmm11, %xmm2
4925 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[1,3]
4926 ; SSE-NEXT: movaps %xmm1, %xmm3
4927 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4928 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4929 ; SSE-NEXT: movdqa %xmm1, %xmm2
4930 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
4931 ; SSE-NEXT: movdqa %xmm12, %xmm13
4932 ; SSE-NEXT: pandn %xmm2, %xmm13
4933 ; SSE-NEXT: andps %xmm12, %xmm0
4934 ; SSE-NEXT: por %xmm0, %xmm13
4935 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm4[1]
4936 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[1,1,1,1,4,5,6,7]
4937 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
4938 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm3[0,2]
4939 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
4940 ; SSE-NEXT: movdqa %xmm1, %xmm4
4941 ; SSE-NEXT: movdqa %xmm10, %xmm9
4942 ; SSE-NEXT: pandn %xmm2, %xmm9
4943 ; SSE-NEXT: andps %xmm10, %xmm0
4944 ; SSE-NEXT: por %xmm0, %xmm9
4945 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
4946 ; SSE-NEXT: movaps %xmm7, %xmm0
4947 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4948 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4949 ; SSE-NEXT: movdqa %xmm11, %xmm2
4950 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,1],xmm1[1,3]
4951 ; SSE-NEXT: movaps %xmm1, %xmm3
4952 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm2[0,2]
4953 ; SSE-NEXT: movdqa %xmm4, %xmm1
4954 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[2,2,3,3]
4955 ; SSE-NEXT: pslld $16, %xmm1
4956 ; SSE-NEXT: movdqa %xmm12, %xmm6
4957 ; SSE-NEXT: pandn %xmm1, %xmm6
4958 ; SSE-NEXT: andps %xmm12, %xmm0
4959 ; SSE-NEXT: por %xmm0, %xmm6
4960 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm7[1]
4961 ; SSE-NEXT: psrldq {{.*#+}} xmm11 = xmm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
4962 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[1,1],xmm7[1,1]
4963 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm3[0,2]
4964 ; SSE-NEXT: movdqa %xmm10, %xmm4
4965 ; SSE-NEXT: pandn %xmm5, %xmm4
4966 ; SSE-NEXT: andps %xmm10, %xmm11
4967 ; SSE-NEXT: por %xmm11, %xmm4
4968 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
4969 ; SSE-NEXT: movaps %xmm7, %xmm0
4970 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4971 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm2[0]
4972 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4973 ; SSE-NEXT: movaps %xmm1, %xmm5
4974 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm2[1,3]
4975 ; SSE-NEXT: movaps %xmm2, %xmm3
4976 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm5[0,2]
4977 ; SSE-NEXT: movdqa %xmm15, %xmm5
4978 ; SSE-NEXT: pslldq {{.*#+}} xmm5 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm5[0,1,2,3,4,5]
4979 ; SSE-NEXT: movdqa %xmm12, %xmm11
4980 ; SSE-NEXT: pandn %xmm5, %xmm11
4981 ; SSE-NEXT: andps %xmm12, %xmm0
4982 ; SSE-NEXT: por %xmm0, %xmm11
4983 ; SSE-NEXT: movaps %xmm7, %xmm0
4984 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1],xmm7[1]
4985 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[1,1,1,1,4,5,6,7]
4986 ; SSE-NEXT: movaps %xmm1, %xmm7
4987 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[1,1],xmm0[1,1]
4988 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm3[0,2]
4989 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,0,1,1]
4990 ; SSE-NEXT: movdqa %xmm15, %xmm8
4991 ; SSE-NEXT: movdqa %xmm10, %xmm15
4992 ; SSE-NEXT: pandn %xmm1, %xmm15
4993 ; SSE-NEXT: andps %xmm10, %xmm5
4994 ; SSE-NEXT: por %xmm5, %xmm15
4995 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4996 ; SSE-NEXT: movaps %xmm3, %xmm1
4997 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4998 ; SSE-NEXT: movlhps {{.*#+}} xmm1 = xmm1[0],xmm2[0]
4999 ; SSE-NEXT: movaps %xmm7, %xmm5
5000 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,1],xmm2[1,3]
5001 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm5[0,2]
5002 ; SSE-NEXT: andps %xmm12, %xmm1
5003 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm8[2,2,3,3]
5004 ; SSE-NEXT: pslld $16, %xmm8
5005 ; SSE-NEXT: pandn %xmm8, %xmm12
5006 ; SSE-NEXT: por %xmm1, %xmm12
5007 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1],xmm3[1]
5008 ; SSE-NEXT: psrldq {{.*#+}} xmm7 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5009 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[1,1],xmm3[1,1]
5010 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0],xmm2[0,2]
5011 ; SSE-NEXT: andps %xmm10, %xmm7
5012 ; SSE-NEXT: pandn %xmm5, %xmm10
5013 ; SSE-NEXT: por %xmm7, %xmm10
5014 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
5015 ; SSE-NEXT: movdqa %xmm10, 736(%rax)
5016 ; SSE-NEXT: movdqa %xmm12, 720(%rax)
5017 ; SSE-NEXT: movdqa %xmm15, 688(%rax)
5018 ; SSE-NEXT: movdqa %xmm11, 672(%rax)
5019 ; SSE-NEXT: movdqa %xmm4, 640(%rax)
5020 ; SSE-NEXT: movdqa %xmm6, 624(%rax)
5021 ; SSE-NEXT: movdqa %xmm9, 592(%rax)
5022 ; SSE-NEXT: movdqa %xmm13, 576(%rax)
5023 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5024 ; SSE-NEXT: movaps %xmm0, 544(%rax)
5025 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5026 ; SSE-NEXT: movaps %xmm0, 528(%rax)
5027 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5028 ; SSE-NEXT: movaps %xmm0, 496(%rax)
5029 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5030 ; SSE-NEXT: movaps %xmm0, 480(%rax)
5031 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5032 ; SSE-NEXT: movaps %xmm0, 448(%rax)
5033 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5034 ; SSE-NEXT: movaps %xmm0, 432(%rax)
5035 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5036 ; SSE-NEXT: movaps %xmm0, 400(%rax)
5037 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5038 ; SSE-NEXT: movaps %xmm0, 384(%rax)
5039 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5040 ; SSE-NEXT: movaps %xmm0, 352(%rax)
5041 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5042 ; SSE-NEXT: movaps %xmm0, 336(%rax)
5043 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
5044 ; SSE-NEXT: movaps %xmm0, 304(%rax)
5045 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5046 ; SSE-NEXT: movaps %xmm0, 288(%rax)
5047 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5048 ; SSE-NEXT: movaps %xmm0, 256(%rax)
5049 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5050 ; SSE-NEXT: movaps %xmm0, 240(%rax)
5051 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5052 ; SSE-NEXT: movaps %xmm0, 208(%rax)
5053 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5054 ; SSE-NEXT: movaps %xmm0, 192(%rax)
5055 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5056 ; SSE-NEXT: movaps %xmm0, 160(%rax)
5057 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5058 ; SSE-NEXT: movaps %xmm0, 144(%rax)
5059 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5060 ; SSE-NEXT: movaps %xmm0, 112(%rax)
5061 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5062 ; SSE-NEXT: movaps %xmm0, 96(%rax)
5063 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5064 ; SSE-NEXT: movaps %xmm0, 64(%rax)
5065 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5066 ; SSE-NEXT: movaps %xmm0, 48(%rax)
5067 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5068 ; SSE-NEXT: movaps %xmm0, 16(%rax)
5069 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5070 ; SSE-NEXT: movaps %xmm0, (%rax)
5071 ; SSE-NEXT: movaps %xmm14, 752(%rax)
5072 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5073 ; SSE-NEXT: movaps %xmm0, 704(%rax)
5074 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5075 ; SSE-NEXT: movaps %xmm0, 656(%rax)
5076 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5077 ; SSE-NEXT: movaps %xmm0, 608(%rax)
5078 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5079 ; SSE-NEXT: movaps %xmm0, 560(%rax)
5080 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5081 ; SSE-NEXT: movaps %xmm0, 512(%rax)
5082 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5083 ; SSE-NEXT: movaps %xmm0, 464(%rax)
5084 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5085 ; SSE-NEXT: movaps %xmm0, 416(%rax)
5086 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5087 ; SSE-NEXT: movaps %xmm0, 368(%rax)
5088 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5089 ; SSE-NEXT: movaps %xmm0, 320(%rax)
5090 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5091 ; SSE-NEXT: movaps %xmm0, 272(%rax)
5092 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5093 ; SSE-NEXT: movaps %xmm0, 224(%rax)
5094 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5095 ; SSE-NEXT: movaps %xmm0, 176(%rax)
5096 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5097 ; SSE-NEXT: movaps %xmm0, 128(%rax)
5098 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5099 ; SSE-NEXT: movaps %xmm0, 80(%rax)
5100 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5101 ; SSE-NEXT: movaps %xmm0, 32(%rax)
5102 ; SSE-NEXT: addq $808, %rsp # imm = 0x328
5103 ; SSE-NEXT: retq
5104 ;
5105 ; AVX1-ONLY-LABEL: store_i16_stride6_vf64:
5106 ; AVX1-ONLY: # %bb.0:
5107 ; AVX1-ONLY-NEXT: subq $504, %rsp # imm = 0x1F8
5108 ; AVX1-ONLY-NEXT: vmovdqa 80(%rcx), %xmm1
5109 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdx), %xmm2
5110 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5111 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[2,2,3,3]
5112 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5113 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
5114 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm2
5115 ; AVX1-ONLY-NEXT: vmovdqa 80(%rsi), %xmm3
5116 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm5
5117 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
5118 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[2,3,2,3]
5119 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
5120 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,1,0,1]
5121 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm3
5122 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7]
5123 ; AVX1-ONLY-NEXT: vmovdqa 80(%r8), %xmm2
5124 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[2,1,3,3,4,5,6,7]
5125 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
5126 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0],xmm6[1,2],xmm3[3]
5127 ; AVX1-ONLY-NEXT: vmovdqa 80(%r9), %xmm3
5128 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm3[0,2,2,3,4,5,6,7]
5129 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[0,1,2,1]
5130 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm8[1],xmm7[2,3,4,5,6],xmm8[7]
5131 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5132 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
5133 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3],xmm2[4,5],xmm6[6,7]
5134 ; AVX1-ONLY-NEXT: vpslld $16, %xmm3, %xmm7
5135 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3,4],xmm7[5],xmm6[6,7]
5136 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5137 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[2,3,2,3]
5138 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
5139 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
5140 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
5141 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
5142 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
5143 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5144 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm4[0],xmm5[1],xmm4[2,3]
5145 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[2,2,3,3]
5146 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3],xmm5[4,5,6,7]
5147 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5148 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
5149 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,6,5,7,7]
5150 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5151 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
5152 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,6,6,7]
5153 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5154 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1],xmm4[2,3,4,5,6],xmm5[7]
5155 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5156 ; AVX1-ONLY-NEXT: vmovdqa 64(%rsi), %xmm8
5157 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm9
5158 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
5159 ; AVX1-ONLY-NEXT: vmovdqa 64(%rcx), %xmm10
5160 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdx), %xmm11
5161 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
5162 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[1,1,2,2]
5163 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm7[2,2,3,3]
5164 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
5165 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[2,3,2,3]
5166 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm6
5167 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7]
5168 ; AVX1-ONLY-NEXT: vmovdqa 64(%r8), %xmm5
5169 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm6 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5170 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm12[0],xmm6[1],xmm12[2,3]
5171 ; AVX1-ONLY-NEXT: vmovdqa 64(%r9), %xmm6
5172 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm6[2,2,3,3]
5173 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3],xmm13[4,5,6,7]
5174 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5175 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
5176 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm5[0,1,2,3,6,5,7,7]
5177 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5178 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3,4,5],xmm13[6,7]
5179 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm6[0,1,2,3,4,6,6,7]
5180 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5181 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1],xmm12[2,3,4,5,6],xmm13[7]
5182 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5183 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,0,1,1]
5184 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
5185 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm0
5186 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[0,1,0,1]
5187 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm12, %ymm1
5188 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6],ymm0[7]
5189 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0,1],xmm2[0],xmm0[3]
5190 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm12 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5]
5191 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm12[5],xmm1[6,7]
5192 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5193 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5194 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
5195 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
5196 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
5197 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
5198 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5199 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
5200 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
5201 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
5202 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5203 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
5204 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
5205 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm3
5206 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5,6],ymm1[7]
5207 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm3 = xmm1[0,1],xmm5[0],xmm1[3]
5208 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm6[0,1,2,3,4,5]
5209 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm8[5],xmm3[6,7]
5210 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5211 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5212 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
5213 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6,7]
5214 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm6[0,0,1,1]
5215 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm3[3],xmm1[4,5,6,7]
5216 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5217 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5218 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[0,0,1,1]
5219 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
5220 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,3,2,3]
5221 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,1,0,1]
5222 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5223 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
5224 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm5[2,1,3,3,4,5,6,7]
5225 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
5226 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm0[1,2],xmm1[3]
5227 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm6[0,2,2,3,4,5,6,7]
5228 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
5229 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1],xmm1[2,3,4,5,6],xmm2[7]
5230 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5231 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5232 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm5[4,5],xmm0[6,7]
5233 ; AVX1-ONLY-NEXT: vpslld $16, %xmm6, %xmm1
5234 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
5235 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5236 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm1
5237 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm2
5238 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5239 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[2,2,3,3]
5240 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5241 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,0,1,1]
5242 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm2
5243 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm3
5244 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm5
5245 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
5246 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
5247 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[2,3,2,3]
5248 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,1,0,1]
5249 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm3, %ymm3
5250 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7]
5251 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm2
5252 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[2,1,3,3,4,5,6,7]
5253 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
5254 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0],xmm6[1,2],xmm3[3]
5255 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm3
5256 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm3[0,2,2,3,4,5,6,7]
5257 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[0,1,2,1]
5258 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm8[1],xmm7[2,3,4,5,6],xmm8[7]
5259 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5260 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
5261 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3],xmm2[4,5],xmm6[6,7]
5262 ; AVX1-ONLY-NEXT: vpslld $16, %xmm3, %xmm7
5263 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3,4],xmm7[5],xmm6[6,7]
5264 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5265 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[2,3,2,3]
5266 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
5267 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
5268 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
5269 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
5270 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
5271 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5272 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm4[0],xmm5[1],xmm4[2,3]
5273 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[2,2,3,3]
5274 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3],xmm5[4,5,6,7]
5275 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5276 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
5277 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,6,5,7,7]
5278 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5279 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
5280 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,6,6,7]
5281 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5282 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1],xmm4[2,3,4,5,6],xmm5[7]
5283 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5284 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm8
5285 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm9
5286 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
5287 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm6[1,1,2,2]
5288 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[2,2,3,3]
5289 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
5290 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm10
5291 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm11
5292 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
5293 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,3,2,3]
5294 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
5295 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
5296 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm4
5297 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5298 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm12[0],xmm5[1],xmm12[2,3]
5299 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm5
5300 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm5[2,2,3,3]
5301 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3],xmm13[4,5,6,7]
5302 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5303 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
5304 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm4[0,1,2,3,6,5,7,7]
5305 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5306 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3,4,5],xmm13[6,7]
5307 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm5[0,1,2,3,4,6,6,7]
5308 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5309 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1],xmm12[2,3,4,5,6],xmm13[7]
5310 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5311 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,0,1,1]
5312 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
5313 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm0
5314 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[0,1,0,1]
5315 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm12, %ymm1
5316 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6],ymm0[7]
5317 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0,1],xmm2[0],xmm0[3]
5318 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm12 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5]
5319 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm12[5],xmm1[6,7]
5320 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5321 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5322 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
5323 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
5324 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
5325 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
5326 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5327 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
5328 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
5329 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
5330 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5331 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
5332 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
5333 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm3
5334 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5,6],ymm1[7]
5335 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm3 = xmm1[0,1],xmm4[0],xmm1[3]
5336 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm5[0,1,2,3,4,5]
5337 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm8[5],xmm3[6,7]
5338 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5339 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5340 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
5341 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6,7]
5342 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,0,1,1]
5343 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm3[3],xmm1[4,5,6,7]
5344 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5345 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5346 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[0,0,1,1]
5347 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
5348 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,3,2,3]
5349 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,1,0,1]
5350 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5351 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
5352 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[2,1,3,3,4,5,6,7]
5353 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
5354 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm0[1,2],xmm1[3]
5355 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm5[0,2,2,3,4,5,6,7]
5356 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
5357 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1],xmm1[2,3,4,5,6],xmm2[7]
5358 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5359 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5360 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm4[4,5],xmm0[6,7]
5361 ; AVX1-ONLY-NEXT: vpslld $16, %xmm5, %xmm1
5362 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
5363 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5364 ; AVX1-ONLY-NEXT: vmovdqa 112(%rcx), %xmm1
5365 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdx), %xmm2
5366 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5367 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5368 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,2,3,3]
5369 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,0,1,1]
5370 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm2
5371 ; AVX1-ONLY-NEXT: vmovdqa 112(%rsi), %xmm3
5372 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm5
5373 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
5374 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
5375 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[2,3,2,3]
5376 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,1,0,1]
5377 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm3, %ymm3
5378 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7]
5379 ; AVX1-ONLY-NEXT: vmovdqa 112(%r8), %xmm2
5380 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[2,1,3,3,4,5,6,7]
5381 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
5382 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm3[0],xmm6[1,2],xmm3[3]
5383 ; AVX1-ONLY-NEXT: vmovdqa 112(%r9), %xmm3
5384 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm3[0,2,2,3,4,5,6,7]
5385 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[0,1,2,1]
5386 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm8[1],xmm7[2,3,4,5,6],xmm8[7]
5387 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5388 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm6
5389 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3],xmm2[4,5],xmm6[6,7]
5390 ; AVX1-ONLY-NEXT: vpslld $16, %xmm3, %xmm7
5391 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3,4],xmm7[5],xmm6[6,7]
5392 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5393 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[2,3,2,3]
5394 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
5395 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
5396 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
5397 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
5398 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
5399 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5400 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm4[0],xmm5[1],xmm4[2,3]
5401 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[2,2,3,3]
5402 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3],xmm5[4,5,6,7]
5403 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5404 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm4
5405 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,6,5,7,7]
5406 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5407 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3,4,5],xmm5[6,7]
5408 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,6,6,7]
5409 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
5410 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1],xmm4[2,3,4,5,6],xmm5[7]
5411 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5412 ; AVX1-ONLY-NEXT: vmovdqa 96(%rcx), %xmm8
5413 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdx), %xmm9
5414 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
5415 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm6[1,1,2,2]
5416 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[2,2,3,3]
5417 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
5418 ; AVX1-ONLY-NEXT: vmovdqa 96(%rsi), %xmm10
5419 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm11
5420 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
5421 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[2,3,2,3]
5422 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
5423 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
5424 ; AVX1-ONLY-NEXT: vmovdqa 96(%r8), %xmm4
5425 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5426 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm12[0],xmm5[1],xmm12[2,3]
5427 ; AVX1-ONLY-NEXT: vmovdqa 96(%r9), %xmm5
5428 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm5[2,2,3,3]
5429 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3],xmm13[4,5,6,7]
5430 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5431 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
5432 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm4[0,1,2,3,6,5,7,7]
5433 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5434 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3,4,5],xmm13[6,7]
5435 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm5[0,1,2,3,4,6,6,7]
5436 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[2,1,2,3]
5437 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1],xmm12[2,3,4,5,6],xmm13[7]
5438 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5439 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,0,1,1]
5440 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
5441 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm0
5442 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[0,1,0,1]
5443 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm12, %ymm1
5444 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6],ymm0[7]
5445 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0,1],xmm2[0],xmm0[3]
5446 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm12 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5]
5447 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm12[5],xmm1[6,7]
5448 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
5449 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5450 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
5451 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
5452 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
5453 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
5454 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5455 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
5456 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
5457 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
5458 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5459 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
5460 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,1,0,1]
5461 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm3
5462 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5,6],ymm1[7]
5463 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm3 = xmm1[0,1],xmm4[0],xmm1[3]
5464 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm5[0,1,2,3,4,5]
5465 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm8[5],xmm3[6,7]
5466 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5467 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5468 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
5469 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6,7]
5470 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,0,1,1]
5471 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm3[3],xmm1[4,5,6,7]
5472 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5473 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5474 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[0,0,1,1]
5475 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
5476 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,3,2,3]
5477 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,1,0,1]
5478 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5479 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
5480 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[2,1,3,3,4,5,6,7]
5481 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
5482 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm1[0],xmm0[1,2],xmm1[3]
5483 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm5[0,2,2,3,4,5,6,7]
5484 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
5485 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1],xmm1[2,3,4,5,6],xmm2[7]
5486 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5487 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5488 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm4[4,5],xmm0[6,7]
5489 ; AVX1-ONLY-NEXT: vpslld $16, %xmm5, %xmm1
5490 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
5491 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5492 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm0
5493 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm1
5494 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5495 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
5496 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[2,2,3,3]
5497 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
5498 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5499 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm2
5500 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm3
5501 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
5502 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5503 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[2,3,2,3]
5504 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[0,1,0,1]
5505 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
5506 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5507 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
5508 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[2,1,3,3,4,5,6,7]
5509 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
5510 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm5 = xmm2[0],xmm1[1,2],xmm2[3]
5511 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm2
5512 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm2[0,2,2,3,4,5,6,7]
5513 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,1,2,1]
5514 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1],xmm5[2,3,4,5,6],xmm6[7]
5515 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5516 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
5517 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm3[4,5],xmm1[6,7]
5518 ; AVX1-ONLY-NEXT: vpslld $16, %xmm2, %xmm5
5519 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm5[5],xmm1[6,7]
5520 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5521 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,3,2,3]
5522 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
5523 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[1,1,2,2]
5524 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
5525 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
5526 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
5527 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5528 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0],xmm1[1],xmm0[2,3]
5529 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,2,3,3]
5530 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3],xmm1[4,5,6,7]
5531 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5532 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5533 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,6,5,7,7]
5534 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
5535 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3,4,5],xmm1[6,7]
5536 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,6,6,7]
5537 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
5538 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm14 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6],xmm1[7]
5539 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm9
5540 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm8
5541 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
5542 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
5543 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
5544 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
5545 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm7
5546 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
5547 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
5548 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,3,2,3]
5549 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
5550 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm12 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
5551 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
5552 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm0 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5553 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm12[0],xmm0[1],xmm12[2,3]
5554 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
5555 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm0[2,2,3,3]
5556 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm15[3],xmm13[4,5,6,7]
5557 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm12
5558 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm15 = xmm1[0,1,2,3,6,5,7,7]
5559 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,1,2,3]
5560 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm15[0,1],xmm12[2,3,4,5],xmm15[6,7]
5561 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm15 = xmm0[0,1,2,3,4,6,6,7]
5562 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,1,2,3]
5563 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm15[1],xmm12[2,3,4,5,6],xmm15[7]
5564 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[0,0,1,1]
5565 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
5566 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm15, %ymm11
5567 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm10[0,1,0,1]
5568 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm15, %ymm10
5569 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm11 = ymm10[0],ymm11[1],ymm10[2,3],ymm11[4],ymm10[5,6],ymm11[7]
5570 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm10 = xmm11[0,1],xmm3[0],xmm11[3]
5571 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm15 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
5572 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2,3,4],xmm15[5],xmm10[6,7]
5573 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm11
5574 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
5575 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2,3],xmm11[4,5,6,7]
5576 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,0,1,1]
5577 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3],xmm3[4,5,6,7]
5578 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
5579 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm3[0,0,1,1]
5580 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm3[1,1,2,2]
5581 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
5582 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
5583 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,1,0,1]
5584 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm7
5585 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0],ymm8[1],ymm7[2,3],ymm8[4],ymm7[5,6],ymm8[7]
5586 ; AVX1-ONLY-NEXT: vinsertps {{.*#+}} xmm8 = xmm7[0,1],xmm1[0],xmm7[3]
5587 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5]
5588 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2,3,4],xmm9[5],xmm8[6,7]
5589 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm7
5590 ; AVX1-ONLY-NEXT: vpmovzxwd {{.*#+}} xmm9 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
5591 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm9[2,3],xmm7[4,5,6,7]
5592 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
5593 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm9[3],xmm7[4,5,6,7]
5594 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
5595 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
5596 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
5597 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[2,3,2,3]
5598 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,0,1]
5599 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
5600 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
5601 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[2,1,3,3,4,5,6,7]
5602 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5603 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm4 = xmm4[0],xmm3[1,2],xmm4[3]
5604 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[0,2,2,3,4,5,6,7]
5605 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,2,1]
5606 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1],xmm4[2,3,4,5,6],xmm5[7]
5607 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
5608 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3],xmm1[4,5],xmm3[6,7]
5609 ; AVX1-ONLY-NEXT: vpslld $16, %xmm0, %xmm0
5610 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4],xmm0[5],xmm1[6,7]
5611 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5612 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 48(%rax)
5613 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, 32(%rax)
5614 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, 16(%rax)
5615 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, (%rax)
5616 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 112(%rax)
5617 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, 96(%rax)
5618 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, 80(%rax)
5619 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, 64(%rax)
5620 ; AVX1-ONLY-NEXT: vmovdqa %xmm14, 176(%rax)
5621 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5622 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 160(%rax)
5623 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5624 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 144(%rax)
5625 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5626 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 128(%rax)
5627 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5628 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 624(%rax)
5629 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5630 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 608(%rax)
5631 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5632 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 592(%rax)
5633 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5634 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 576(%rax)
5635 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5636 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 688(%rax)
5637 ; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
5638 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 672(%rax)
5639 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5640 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 656(%rax)
5641 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5642 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 640(%rax)
5643 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5644 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 752(%rax)
5645 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5646 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 736(%rax)
5647 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5648 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 720(%rax)
5649 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5650 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 704(%rax)
5651 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5652 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 240(%rax)
5653 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5654 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 224(%rax)
5655 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5656 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
5657 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5658 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 192(%rax)
5659 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5660 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 304(%rax)
5661 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5662 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 288(%rax)
5663 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5664 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 272(%rax)
5665 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5666 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 256(%rax)
5667 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5668 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 368(%rax)
5669 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5670 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 352(%rax)
5671 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5672 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 336(%rax)
5673 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5674 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 320(%rax)
5675 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5676 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 432(%rax)
5677 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5678 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 416(%rax)
5679 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5680 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 400(%rax)
5681 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5682 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 384(%rax)
5683 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5684 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 496(%rax)
5685 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5686 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 480(%rax)
5687 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5688 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 464(%rax)
5689 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5690 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 448(%rax)
5691 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5692 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 560(%rax)
5693 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5694 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 544(%rax)
5695 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5696 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 528(%rax)
5697 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5698 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 512(%rax)
5699 ; AVX1-ONLY-NEXT: addq $504, %rsp # imm = 0x1F8
5700 ; AVX1-ONLY-NEXT: vzeroupper
5701 ; AVX1-ONLY-NEXT: retq
5702 ;
5703 ; AVX2-SLOW-LABEL: store_i16_stride6_vf64:
5704 ; AVX2-SLOW: # %bb.0:
5705 ; AVX2-SLOW-NEXT: subq $1544, %rsp # imm = 0x608
5706 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm12
5707 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm5
5708 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5709 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5710 ; AVX2-SLOW-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5711 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm2
5712 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm6
5713 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5714 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5715 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, %xmm11
5716 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5717 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
5718 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm0, %ymm0
5719 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm2
5720 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm7
5721 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[0,1,2,1]
5722 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, %xmm14
5723 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
5724 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm10
5725 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm8
5726 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[0,1,2,1]
5727 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
5728 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5729 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
5730 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
5731 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm1
5732 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5733 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm4
5734 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5735 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,3,3,4,5,6,7]
5736 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5737 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7]
5738 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm0
5739 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5740 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
5741 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
5742 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,2,1]
5743 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
5744 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5745 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5746 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5747 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5748 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5749 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,1,2,1]
5750 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, %xmm13
5751 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5752 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
5753 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[0,1,2,1]
5754 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm5
5755 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5756 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
5757 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5758 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm3
5759 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5760 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
5761 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
5762 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5763 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[2,1,3,3,4,5,6,7]
5764 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5765 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5766 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,2,2,3,4,5,6,7]
5767 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
5768 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5769 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5770 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5771 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %xmm1
5772 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5773 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %xmm2
5774 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5775 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5776 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5777 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5778 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %xmm9
5779 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm9[0,1,2,1]
5780 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
5781 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm3
5782 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5783 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
5784 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
5785 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5786 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
5787 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
5788 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5789 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %xmm2
5790 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5791 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
5792 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5793 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5794 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %xmm2
5795 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5796 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
5797 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
5798 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5799 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5800 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5801 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %xmm1
5802 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5803 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5804 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %xmm2
5805 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5806 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
5807 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5808 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %xmm2
5809 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5810 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
5811 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
5812 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm3
5813 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5814 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
5815 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
5816 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5817 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
5818 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
5819 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5820 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %xmm2
5821 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
5822 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
5823 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5824 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5825 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %xmm2
5826 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5827 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
5828 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
5829 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
5830 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5831 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5832 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm2
5833 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5834 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm1
5835 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5836 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5837 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5838 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
5839 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm2
5840 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5841 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,2,3,6,5,6,7]
5842 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5843 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm3
5844 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5845 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,2,3,6,5,6,7]
5846 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5847 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
5848 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
5849 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
5850 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5851 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm2
5852 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5853 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
5854 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5855 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5856 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm2
5857 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5858 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
5859 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
5860 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5861 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5862 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5863 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm2
5864 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5865 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm1
5866 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5867 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5868 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5869 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
5870 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm3
5871 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm3[2,1,2,3,6,5,6,7]
5872 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, %ymm7
5873 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5874 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5875 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm15
5876 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[2,1,2,3,6,5,6,7]
5877 ; AVX2-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5878 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5879 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
5880 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
5881 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
5882 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5883 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm2
5884 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5885 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
5886 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5887 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5888 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm2
5889 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5890 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
5891 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
5892 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5893 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5894 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5895 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %ymm2
5896 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5897 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %ymm1
5898 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5899 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5900 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5901 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
5902 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
5903 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5904 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,2,3,6,5,6,7]
5905 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5906 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
5907 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5908 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,2,3,6,5,6,7]
5909 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5910 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
5911 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
5912 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
5913 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5914 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %ymm2
5915 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5916 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
5917 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5918 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5919 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %ymm2
5920 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5921 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
5922 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
5923 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5924 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5925 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5926 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %ymm3
5927 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5928 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %ymm1
5929 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5930 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5931 ; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
5932 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
5933 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %ymm6
5934 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[2,1,2,3,6,5,6,7]
5935 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5936 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5937 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %ymm3
5938 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5939 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,2,3,6,5,6,7]
5940 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
5941 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
5942 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
5943 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
5944 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
5945 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %ymm8
5946 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm8[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
5947 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5948 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5949 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
5950 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %ymm2
5951 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5952 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
5953 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
5954 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5955 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
5956 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5957 ; AVX2-SLOW-NEXT: vmovdqa %xmm14, %xmm4
5958 ; AVX2-SLOW-NEXT: vmovdqa %xmm10, %xmm3
5959 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm10[0],xmm14[0],xmm10[1],xmm14[1],xmm10[2],xmm14[2],xmm10[3],xmm14[3]
5960 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
5961 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
5962 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
5963 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
5964 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
5965 ; AVX2-SLOW-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5966 ; AVX2-SLOW-NEXT: # xmm1 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
5967 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
5968 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
5969 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
5970 ; AVX2-SLOW-NEXT: # xmm0 = mem[0,0,2,1,4,5,6,7]
5971 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm0, %ymm2
5972 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
5973 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5974 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5975 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm13[0],xmm5[1],xmm13[1],xmm5[2],xmm13[2],xmm5[3],xmm13[3]
5976 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5977 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5978 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm2 # 16-byte Folded Reload
5979 ; AVX2-SLOW-NEXT: # xmm2 = xmm5[0],mem[0],xmm5[1],mem[1],xmm5[2],mem[2],xmm5[3],mem[3]
5980 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,0,2,2]
5981 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
5982 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
5983 ; AVX2-SLOW-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5984 ; AVX2-SLOW-NEXT: # xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
5985 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
5986 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
5987 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5988 ; AVX2-SLOW-NEXT: # xmm2 = mem[0,0,2,1,4,5,6,7]
5989 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
5990 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
5991 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5992 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5993 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
5994 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
5995 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5996 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
5997 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
5998 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,0,2,2]
5999 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6000 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6001 ; AVX2-SLOW-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6002 ; AVX2-SLOW-NEXT: # xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6003 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
6004 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6005 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6006 ; AVX2-SLOW-NEXT: # xmm2 = mem[0,0,2,1,4,5,6,7]
6007 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
6008 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6009 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6010 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6011 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6012 ; AVX2-SLOW-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
6013 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6014 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6015 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6016 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
6017 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,0,2,2]
6018 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6019 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6020 ; AVX2-SLOW-NEXT: vpmovzxwd (%rsp), %xmm2 # 16-byte Folded Reload
6021 ; AVX2-SLOW-NEXT: # xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6022 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
6023 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6024 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6025 ; AVX2-SLOW-NEXT: # xmm2 = mem[0,0,2,1,4,5,6,7]
6026 ; AVX2-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
6027 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6028 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6029 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6030 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
6031 ; AVX2-SLOW-NEXT: # ymm1 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[8],mem[8],ymm1[9],mem[9],ymm1[10],mem[10],ymm1[11],mem[11]
6032 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6033 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6034 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
6035 ; AVX2-SLOW-NEXT: # ymm2 = ymm2[0],mem[0],ymm2[1],mem[1],ymm2[2],mem[2],ymm2[3],mem[3],ymm2[8],mem[8],ymm2[9],mem[9],ymm2[10],mem[10],ymm2[11],mem[11]
6036 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,0,2,2,5,4,6,6]
6037 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6038 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6039 ; AVX2-SLOW-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
6040 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6041 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6042 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6043 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
6044 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6045 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6046 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6047 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6048 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm15[0],ymm7[0],ymm15[1],ymm7[1],ymm15[2],ymm7[2],ymm15[3],ymm7[3],ymm15[8],ymm7[8],ymm15[9],ymm7[9],ymm15[10],ymm7[10],ymm15[11],ymm7[11]
6049 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6050 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6051 ; AVX2-SLOW-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
6052 ; AVX2-SLOW-NEXT: # ymm2 = ymm2[0],mem[0],ymm2[1],mem[1],ymm2[2],mem[2],ymm2[3],mem[3],ymm2[8],mem[8],ymm2[9],mem[9],ymm2[10],mem[10],ymm2[11],mem[11]
6053 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,0,2,2,5,4,6,6]
6054 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6055 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6056 ; AVX2-SLOW-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
6057 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6058 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6059 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6060 ; AVX2-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
6061 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6062 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6063 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6064 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6065 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6066 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6067 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11]
6068 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6069 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6070 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
6071 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[8],ymm11[8],ymm10[9],ymm11[9],ymm10[10],ymm11[10],ymm10[11],ymm11[11]
6072 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,0,2,2,5,4,6,6]
6073 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6074 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6075 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
6076 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm14[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6077 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6078 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6079 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6080 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm15[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6081 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6082 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6083 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6084 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
6085 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11]
6086 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6087 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
6088 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6089 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm2[0],ymm6[0],ymm2[1],ymm6[1],ymm2[2],ymm6[2],ymm2[3],ymm6[3],ymm2[8],ymm6[8],ymm2[9],ymm6[9],ymm2[10],ymm6[10],ymm2[11],ymm6[11]
6090 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,0,2,2,5,4,6,6]
6091 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6092 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6093 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm8[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6094 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6095 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6096 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
6097 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm8[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6098 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6099 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
6100 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6101 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
6102 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6103 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6104 ; AVX2-SLOW-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6105 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
6106 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,2,3,3]
6107 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
6108 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
6109 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
6110 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6111 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm2
6112 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6113 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
6114 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6115 ; AVX2-SLOW-NEXT: # xmm2 = mem[2,3,2,3]
6116 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,1,4,5,6,7]
6117 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,1,0,1]
6118 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
6119 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm3, %ymm0
6120 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6121 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6122 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6123 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6124 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm3 # 16-byte Folded Reload
6125 ; AVX2-SLOW-NEXT: # xmm3 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
6126 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
6127 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,2,3,3]
6128 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6129 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7]
6130 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6131 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm3
6132 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6133 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6],ymm3[7]
6134 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6135 ; AVX2-SLOW-NEXT: # xmm3 = mem[2,3,2,3]
6136 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,1,4,5,6,7]
6137 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6138 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm3, %ymm0
6139 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6140 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6141 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm9[4],xmm0[5],xmm9[5],xmm0[6],xmm9[6],xmm0[7],xmm9[7]
6142 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6143 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm3 # 16-byte Folded Reload
6144 ; AVX2-SLOW-NEXT: # xmm3 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6145 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
6146 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,2,3,3]
6147 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6148 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7]
6149 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6150 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm3
6151 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6152 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6],ymm3[7]
6153 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6154 ; AVX2-SLOW-NEXT: # xmm3 = mem[2,3,2,3]
6155 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,1,4,5,6,7]
6156 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6157 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm3, %ymm0
6158 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6159 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6160 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6161 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6162 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6163 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm3 # 16-byte Folded Reload
6164 ; AVX2-SLOW-NEXT: # xmm3 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6165 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
6166 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,2,3,3]
6167 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6168 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7]
6169 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
6170 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6171 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6172 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6173 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6174 ; AVX2-SLOW-NEXT: # xmm1 = mem[2,3,2,3]
6175 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,1,4,5,6,7]
6176 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6177 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm9
6178 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6179 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6180 ; AVX2-SLOW-NEXT: # ymm0 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6181 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6182 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
6183 ; AVX2-SLOW-NEXT: # ymm1 = ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[12],mem[12],ymm1[13],mem[13],ymm1[14],mem[14],ymm1[15],mem[15]
6184 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
6185 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
6186 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6187 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
6188 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
6189 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6190 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm3
6191 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
6192 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm3[1],ymm1[2,3],ymm3[4],ymm1[5,6],ymm3[7]
6193 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6194 ; AVX2-SLOW-NEXT: # ymm3 = mem[2,3,2,3,6,7,6,7]
6195 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
6196 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
6197 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm5
6198 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6199 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
6200 ; AVX2-SLOW-NEXT: # ymm1 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6201 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6202 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6203 ; AVX2-SLOW-NEXT: # ymm0 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6204 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
6205 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[1,2,3,3,5,6,7,7]
6206 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
6207 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6208 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6209 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm1, %ymm1
6210 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6211 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6212 ; AVX2-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Folded Reload
6213 ; AVX2-SLOW-NEXT: # ymm1 = mem[2,3,2,3,6,7,6,7]
6214 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
6215 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6216 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm3
6217 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15]
6218 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm10[4],ymm11[4],ymm10[5],ymm11[5],ymm10[6],ymm11[6],ymm10[7],ymm11[7],ymm10[12],ymm11[12],ymm10[13],ymm11[13],ymm10[14],ymm11[14],ymm10[15],ymm11[15]
6219 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
6220 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
6221 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6222 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
6223 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm14, %ymm1
6224 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6225 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6226 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm15[2,3,2,3,6,7,6,7]
6227 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
6228 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6229 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
6230 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm1 # 32-byte Folded Reload
6231 ; AVX2-SLOW-NEXT: # ymm1 = ymm7[4],mem[4],ymm7[5],mem[5],ymm7[6],mem[6],ymm7[7],mem[7],ymm7[12],mem[12],ymm7[13],mem[13],ymm7[14],mem[14],ymm7[15],mem[15]
6232 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
6233 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm11 = ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[12],ymm6[12],ymm7[13],ymm6[13],ymm7[14],ymm6[14],ymm7[15],ymm6[15]
6234 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
6235 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[1,2,3,3,5,6,7,7]
6236 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
6237 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1],ymm1[2],ymm11[3,4],ymm1[5],ymm11[6,7]
6238 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
6239 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm6, %ymm2
6240 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6241 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
6242 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[2,3,2,3,6,7,6,7]
6243 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
6244 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6245 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
6246 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6247 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 736(%rax)
6248 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6249 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 672(%rax)
6250 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 544(%rax)
6251 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6252 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 480(%rax)
6253 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 352(%rax)
6254 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6255 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
6256 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 160(%rax)
6257 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6258 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
6259 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6260 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 704(%rax)
6261 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, 640(%rax)
6262 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6263 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 576(%rax)
6264 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6265 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 512(%rax)
6266 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6267 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 448(%rax)
6268 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6269 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
6270 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6271 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
6272 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6273 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
6274 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6275 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 192(%rax)
6276 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6277 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
6278 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6279 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
6280 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6281 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
6282 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6283 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 608(%rax)
6284 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6285 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 416(%rax)
6286 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6287 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
6288 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6289 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
6290 ; AVX2-SLOW-NEXT: addq $1544, %rsp # imm = 0x608
6291 ; AVX2-SLOW-NEXT: vzeroupper
6292 ; AVX2-SLOW-NEXT: retq
6293 ;
6294 ; AVX2-FAST-LABEL: store_i16_stride6_vf64:
6295 ; AVX2-FAST: # %bb.0:
6296 ; AVX2-FAST-NEXT: subq $1560, %rsp # imm = 0x618
6297 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
6298 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6299 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm5
6300 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6301 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
6302 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm1, %xmm1
6303 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm2
6304 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6305 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm7
6306 ; AVX2-FAST-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6307 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm2
6308 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
6309 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6310 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm2
6311 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6312 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm8
6313 ; AVX2-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6314 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6315 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm3
6316 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6317 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm14
6318 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6319 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
6320 ; AVX2-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
6321 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6322 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
6323 ; AVX2-FAST-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
6324 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
6325 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6326 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
6327 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm1
6328 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6329 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
6330 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
6331 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,0,2,1]
6332 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
6333 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6334 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6335 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm5, %xmm3
6336 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm7, %xmm4
6337 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
6338 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm4 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6339 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6340 ; AVX2-FAST-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6341 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6342 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6343 ; AVX2-FAST-NEXT: vpbroadcastq %xmm4, %ymm4
6344 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
6345 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm4
6346 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6347 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,3,3,4,5,6,7]
6348 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6349 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
6350 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm4
6351 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6352 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm4
6353 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6354 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6355 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6356 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %xmm3
6357 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6358 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
6359 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6360 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6361 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm4
6362 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
6363 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %xmm4
6364 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6365 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %xmm5
6366 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6367 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm4 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6368 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6369 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6370 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6371 ; AVX2-FAST-NEXT: vpbroadcastq %xmm4, %ymm4
6372 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
6373 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %xmm4
6374 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6375 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,3,3,4,5,6,7]
6376 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6377 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
6378 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %xmm4
6379 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6380 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm4
6381 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
6382 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6383 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6384 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %xmm3
6385 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6386 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6387 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm4
6388 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6389 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm0
6390 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm3[4],xmm0[5],xmm3[5],xmm0[6],xmm3[6],xmm0[7],xmm3[7]
6391 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %xmm13
6392 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm3 = xmm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6393 ; AVX2-FAST-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6394 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %xmm12
6395 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm4 = xmm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
6396 ; AVX2-FAST-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6397 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
6398 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6399 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
6400 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7]
6401 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %xmm3
6402 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6403 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
6404 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6405 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm0[1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7]
6406 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %xmm3
6407 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6408 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm2
6409 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6410 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
6411 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6412 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm3
6413 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6414 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm2
6415 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6416 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
6417 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm2
6418 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm3
6419 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
6420 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm4
6421 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6422 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm3
6423 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6424 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6425 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6426 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
6427 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6428 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6429 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6430 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm3
6431 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6432 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
6433 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6434 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
6435 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm4
6436 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6437 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
6438 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
6439 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm4
6440 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6441 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6442 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6443 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm4
6444 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6445 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm3
6446 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6447 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm3
6448 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm4
6449 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
6450 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm10
6451 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm11
6452 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm11[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6453 ; AVX2-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6454 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm5 = ymm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm10[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6455 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6456 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
6457 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
6458 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
6459 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
6460 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm4
6461 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6462 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
6463 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6464 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
6465 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm4
6466 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6467 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm4
6468 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6469 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6470 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6471 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %ymm5
6472 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6473 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %ymm4
6474 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm3
6475 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm9
6476 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm5, %ymm4
6477 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
6478 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %ymm15
6479 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %ymm4
6480 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6481 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6482 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm5 = ymm15[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm15[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6483 ; AVX2-FAST-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6484 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
6485 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
6486 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
6487 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
6488 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %ymm4
6489 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6490 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
6491 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6492 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7]
6493 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %ymm4
6494 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6495 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm4
6496 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6497 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
6498 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6499 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %ymm4
6500 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6501 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %ymm3
6502 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6503 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm3
6504 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm0
6505 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[2],ymm3[2],ymm0[3],ymm3[3],ymm0[8],ymm3[8],ymm0[9],ymm3[9],ymm0[10],ymm3[10],ymm0[11],ymm3[11]
6506 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %ymm8
6507 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %ymm7
6508 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm7[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6509 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6510 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm8[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
6511 ; AVX2-FAST-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6512 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
6513 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
6514 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6515 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7]
6516 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %ymm6
6517 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
6518 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6519 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6520 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm0[1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7]
6521 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %ymm3
6522 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6523 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm2
6524 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
6525 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
6526 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6527 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6528 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
6529 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
6530 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [1,0,2,2,1,0,2,2]
6531 ; AVX2-FAST-NEXT: # ymm1 = mem[0,1,0,1]
6532 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
6533 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6534 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6535 ; AVX2-FAST-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
6536 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6537 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5,6],ymm0[7]
6538 ; AVX2-FAST-NEXT: vpmovzxwd (%rsp), %xmm2 # 16-byte Folded Reload
6539 ; AVX2-FAST-NEXT: # xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6540 ; AVX2-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
6541 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
6542 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
6543 ; AVX2-FAST-NEXT: # xmm0 = mem[0,0,2,1,4,5,6,7]
6544 ; AVX2-FAST-NEXT: vpbroadcastq %xmm0, %ymm3
6545 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
6546 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6547 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6548 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm2 # 16-byte Folded Reload
6549 ; AVX2-FAST-NEXT: # xmm2 = xmm14[0],mem[0],xmm14[1],mem[1],xmm14[2],mem[2],xmm14[3],mem[3]
6550 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm2
6551 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6552 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm3 # 16-byte Folded Reload
6553 ; AVX2-FAST-NEXT: # xmm3 = xmm14[0],mem[0],xmm14[1],mem[1],xmm14[2],mem[2],xmm14[3],mem[3]
6554 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6555 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6556 ; AVX2-FAST-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6557 ; AVX2-FAST-NEXT: # xmm3 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6558 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
6559 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6560 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6561 ; AVX2-FAST-NEXT: # xmm3 = mem[0,0,2,1,4,5,6,7]
6562 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
6563 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6564 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6565 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6566 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6567 ; AVX2-FAST-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
6568 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm2
6569 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6570 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
6571 ; AVX2-FAST-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
6572 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
6573 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6574 ; AVX2-FAST-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6575 ; AVX2-FAST-NEXT: # xmm3 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6576 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
6577 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6578 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6579 ; AVX2-FAST-NEXT: # xmm3 = mem[0,0,2,1,4,5,6,7]
6580 ; AVX2-FAST-NEXT: vpbroadcastq %xmm3, %ymm3
6581 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6582 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6583 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
6584 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
6585 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6586 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6587 ; AVX2-FAST-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
6588 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
6589 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
6590 ; AVX2-FAST-NEXT: vpmovzxwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6591 ; AVX2-FAST-NEXT: # xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
6592 ; AVX2-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
6593 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6594 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6595 ; AVX2-FAST-NEXT: # xmm2 = mem[0,0,2,1,4,5,6,7]
6596 ; AVX2-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
6597 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
6598 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6599 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6600 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm2 # 32-byte Folded Reload
6601 ; AVX2-FAST-NEXT: # ymm2 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[8],mem[8],ymm1[9],mem[9],ymm1[10],mem[10],ymm1[11],mem[11]
6602 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [5,4,2,2,5,4,6,6]
6603 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm2
6604 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6605 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
6606 ; AVX2-FAST-NEXT: # ymm3 = ymm3[0],mem[0],ymm3[1],mem[1],ymm3[2],mem[2],ymm3[3],mem[3],ymm3[8],mem[8],ymm3[9],mem[9],ymm3[10],mem[10],ymm3[11],mem[11]
6607 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6608 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6609 ; AVX2-FAST-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6610 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6611 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6612 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6613 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6614 ; AVX2-FAST-NEXT: # ymm3 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6615 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6616 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6617 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6618 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm10[0],ymm11[0],ymm10[1],ymm11[1],ymm10[2],ymm11[2],ymm10[3],ymm11[3],ymm10[8],ymm11[8],ymm10[9],ymm11[9],ymm10[10],ymm11[10],ymm10[11],ymm11[11]
6619 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm2
6620 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6621 ; AVX2-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
6622 ; AVX2-FAST-NEXT: # ymm3 = ymm3[0],mem[0],ymm3[1],mem[1],ymm3[2],mem[2],ymm3[3],mem[3],ymm3[8],mem[8],ymm3[9],mem[9],ymm3[10],mem[10],ymm3[11],mem[11]
6623 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6624 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6625 ; AVX2-FAST-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6626 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6627 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6628 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6629 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6630 ; AVX2-FAST-NEXT: # ymm3 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6631 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6632 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6633 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6634 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6635 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm15[0],ymm13[0],ymm15[1],ymm13[1],ymm15[2],ymm13[2],ymm15[3],ymm13[3],ymm15[8],ymm13[8],ymm15[9],ymm13[9],ymm15[10],ymm13[10],ymm15[11],ymm13[11]
6636 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm2
6637 ; AVX2-FAST-NEXT: vmovdqa %ymm9, %ymm12
6638 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6639 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm11[0],ymm9[0],ymm11[1],ymm9[1],ymm11[2],ymm9[2],ymm11[3],ymm9[3],ymm11[8],ymm9[8],ymm11[9],ymm9[9],ymm11[10],ymm9[10],ymm11[11],ymm9[11]
6640 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6641 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6642 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6643 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm15[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6644 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6645 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
6646 ; AVX2-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
6647 ; AVX2-FAST-NEXT: # ymm3 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6648 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
6649 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
6650 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6651 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm8[0],ymm7[0],ymm8[1],ymm7[1],ymm8[2],ymm7[2],ymm8[3],ymm7[3],ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11]
6652 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
6653 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
6654 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
6655 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11]
6656 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
6657 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
6658 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm6[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6659 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6660 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6661 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
6662 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm2 = ymm9[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6663 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
6664 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
6665 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6666 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6667 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6668 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6669 ; AVX2-FAST-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6670 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <1,2,1,2,u,u,3,3>
6671 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
6672 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,1,1,1]
6673 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6674 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
6675 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
6676 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
6677 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6678 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6679 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
6680 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6681 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6682 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm0[0,1,0,1]
6683 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
6684 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm0
6685 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6686 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6687 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
6688 ; AVX2-FAST-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6689 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm5 # 16-byte Folded Reload
6690 ; AVX2-FAST-NEXT: # xmm5 = xmm14[4],mem[4],xmm14[5],mem[5],xmm14[6],mem[6],xmm14[7],mem[7]
6691 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm3, %ymm4
6692 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[1,1,1,1]
6693 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
6694 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6695 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm5
6696 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
6697 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5,6],ymm5[7]
6698 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6699 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm5
6700 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
6701 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm0
6702 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6703 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6704 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
6705 ; AVX2-FAST-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6706 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6707 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
6708 ; AVX2-FAST-NEXT: # xmm5 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6709 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm3, %ymm4
6710 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[1,1,1,1]
6711 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7]
6712 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6713 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm5
6714 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
6715 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5,6],ymm5[7]
6716 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6717 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm5
6718 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
6719 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm0
6720 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6721 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6722 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
6723 ; AVX2-FAST-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6724 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm3, %ymm3
6725 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6726 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
6727 ; AVX2-FAST-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6728 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[1,1,1,1]
6729 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7]
6730 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6731 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm2
6732 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6733 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6734 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6735 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm1
6736 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6737 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm10
6738 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6739 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
6740 ; AVX2-FAST-NEXT: # ymm1 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6741 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6742 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
6743 ; AVX2-FAST-NEXT: # ymm2 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6744 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [5,6,5,6,5,6,7,7]
6745 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
6746 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[3,3,3,3]
6747 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
6748 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
6749 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6750 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm4
6751 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,2,3]
6752 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0],ymm4[1],ymm1[2,3],ymm4[4],ymm1[5,6],ymm4[7]
6753 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
6754 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6755 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm14
6756 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
6757 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm14, %ymm5
6758 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6759 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm14 # 32-byte Folded Reload
6760 ; AVX2-FAST-NEXT: # ymm14 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6761 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6762 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6763 ; AVX2-FAST-NEXT: # ymm0 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
6764 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm3, %ymm14
6765 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
6766 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1],ymm0[2],ymm14[3,4],ymm0[5],ymm14[6,7]
6767 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6768 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm14
6769 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
6770 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm14[1],ymm0[2,3],ymm14[4],ymm0[5,6],ymm14[7]
6771 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6772 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm4, %ymm14
6773 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
6774 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm14, %ymm4
6775 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6776 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm14 = ymm0[4],ymm13[4],ymm0[5],ymm13[5],ymm0[6],ymm13[6],ymm0[7],ymm13[7],ymm0[12],ymm13[12],ymm0[13],ymm13[13],ymm0[14],ymm13[14],ymm0[15],ymm13[15]
6777 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15]
6778 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm3, %ymm14
6779 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
6780 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1],ymm0[2],ymm14[3,4],ymm0[5],ymm14[6,7]
6781 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm15, %ymm14
6782 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
6783 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm14[1],ymm0[2,3],ymm14[4],ymm0[5,6],ymm14[7]
6784 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6785 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm14
6786 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
6787 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm14, %ymm0
6788 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6789 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm14 # 32-byte Folded Reload
6790 ; AVX2-FAST-NEXT: # ymm14 = ymm11[4],mem[4],ymm11[5],mem[5],ymm11[6],mem[6],ymm11[7],mem[7],ymm11[12],mem[12],ymm11[13],mem[13],ymm11[14],mem[14],ymm11[15],mem[15]
6791 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm3, %ymm3
6792 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} ymm14 = ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15]
6793 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[3,3,3,3]
6794 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1],ymm14[2],ymm3[3,4],ymm14[5],ymm3[6,7]
6795 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
6796 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm7, %ymm2
6797 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
6798 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6],ymm2[7]
6799 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm9, %ymm1
6800 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6801 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm1
6802 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6803 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 736(%rax)
6804 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6805 ; AVX2-FAST-NEXT: vmovaps %ymm1, 672(%rax)
6806 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 544(%rax)
6807 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6808 ; AVX2-FAST-NEXT: vmovaps %ymm0, 480(%rax)
6809 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 352(%rax)
6810 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6811 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%rax)
6812 ; AVX2-FAST-NEXT: vmovdqa %ymm5, 160(%rax)
6813 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6814 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%rax)
6815 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6816 ; AVX2-FAST-NEXT: vmovaps %ymm0, 704(%rax)
6817 ; AVX2-FAST-NEXT: vmovdqa %ymm10, 640(%rax)
6818 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6819 ; AVX2-FAST-NEXT: vmovaps %ymm0, 576(%rax)
6820 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6821 ; AVX2-FAST-NEXT: vmovaps %ymm0, 512(%rax)
6822 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6823 ; AVX2-FAST-NEXT: vmovaps %ymm0, 448(%rax)
6824 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6825 ; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
6826 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6827 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
6828 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6829 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
6830 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6831 ; AVX2-FAST-NEXT: vmovaps %ymm0, 192(%rax)
6832 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6833 ; AVX2-FAST-NEXT: vmovaps %ymm0, 128(%rax)
6834 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6835 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
6836 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6837 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
6838 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6839 ; AVX2-FAST-NEXT: vmovaps %ymm0, 608(%rax)
6840 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6841 ; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
6842 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6843 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
6844 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6845 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
6846 ; AVX2-FAST-NEXT: addq $1560, %rsp # imm = 0x618
6847 ; AVX2-FAST-NEXT: vzeroupper
6848 ; AVX2-FAST-NEXT: retq
6849 ;
6850 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride6_vf64:
6851 ; AVX2-FAST-PERLANE: # %bb.0:
6852 ; AVX2-FAST-PERLANE-NEXT: subq $1544, %rsp # imm = 0x608
6853 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm0
6854 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6855 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm5
6856 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm1
6857 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6858 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm4
6859 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6860 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
6861 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6862 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm1
6863 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6864 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm8
6865 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm2
6866 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6867 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm6
6868 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6869 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
6870 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
6871 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6872 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6873 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm1
6874 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6875 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm2
6876 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6877 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
6878 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6879 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6880 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm1
6881 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6882 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm3
6883 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6884 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,2,1,4,5,6,7]
6885 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6886 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255]
6887 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
6888 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6889 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
6890 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6891 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6892 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3]
6893 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, %xmm11
6894 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6895 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
6896 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6897 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6898 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
6899 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6900 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6901 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[0,0,2,1,4,5,6,7]
6902 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6903 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
6904 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6905 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %xmm12
6906 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm0
6907 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6908 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
6909 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6910 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6911 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %xmm1
6912 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
6913 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %xmm2
6914 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6915 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
6916 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
6917 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6918 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6919 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %xmm1
6920 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6921 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
6922 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6923 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6924 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %xmm1
6925 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6926 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,2,1,4,5,6,7]
6927 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6928 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
6929 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6930 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %xmm6
6931 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm0
6932 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6933 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
6934 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6935 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
6936 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %xmm1
6937 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6938 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %xmm2
6939 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6940 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
6941 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,0,2,2]
6942 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6943 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6944 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %xmm1
6945 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6946 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
6947 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6948 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6949 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %xmm1
6950 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6951 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,2,1,4,5,6,7]
6952 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm1, %ymm1
6953 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
6954 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6955 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm1
6956 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm0
6957 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6958 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
6959 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, %ymm14
6960 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
6961 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm13
6962 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm9
6963 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm13[0],ymm9[0],ymm13[1],ymm9[1],ymm13[2],ymm9[2],ymm13[3],ymm9[3],ymm13[8],ymm9[8],ymm13[9],ymm9[9],ymm13[10],ymm9[10],ymm13[11],ymm9[11]
6964 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6965 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6966 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,0,2,2,5,4,6,6]
6967 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6968 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6969 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm1
6970 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6971 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6972 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6973 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6974 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm1
6975 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6976 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
6977 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6978 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
6979 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6980 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm1
6981 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6982 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm0
6983 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6984 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
6985 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
6986 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm2
6987 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm1
6988 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6989 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
6990 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm15
6991 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,0,2,2,5,4,6,6]
6992 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
6993 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
6994 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm1
6995 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6996 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
6997 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6998 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
6999 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm1
7000 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7001 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7002 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7003 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
7004 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7005 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %ymm1
7006 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7007 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %ymm0
7008 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7009 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
7010 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7011 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %ymm8
7012 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %ymm1
7013 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7014 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm8[0],ymm1[0],ymm8[1],ymm1[1],ymm8[2],ymm1[2],ymm8[3],ymm1[3],ymm8[8],ymm1[8],ymm8[9],ymm1[9],ymm8[10],ymm1[10],ymm8[11],ymm1[11]
7015 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7016 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,0,2,2,5,4,6,6]
7017 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7018 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
7019 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %ymm1
7020 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7021 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7022 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7023 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
7024 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %ymm1
7025 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7026 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7027 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7028 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
7029 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7030 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %ymm0
7031 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7032 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %ymm7
7033 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm7[0],ymm0[1],ymm7[1],ymm0[2],ymm7[2],ymm0[3],ymm7[3],ymm0[8],ymm7[8],ymm0[9],ymm7[9],ymm0[10],ymm7[10],ymm0[11],ymm7[11]
7034 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7035 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7036 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %ymm2
7037 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7038 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %ymm1
7039 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7040 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
7041 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,0,2,2,5,4,6,6]
7042 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7043 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
7044 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %ymm1
7045 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7046 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7047 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7048 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7]
7049 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %ymm1
7050 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7051 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7052 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7053 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
7054 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7055 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
7056 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7057 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
7058 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7059 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7060 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7061 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7062 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7063 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7064 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7065 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7066 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7067 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm2, %ymm2
7068 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
7069 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $246, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7070 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = mem[2,1,3,3,4,5,6,7]
7071 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
7072 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
7073 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
7074 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7075 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7076 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
7077 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255]
7078 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7079 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7080 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm2
7081 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7082 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
7083 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
7084 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7086 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7087 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7088 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7089 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm3, %ymm3
7090 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
7091 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $246, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
7092 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = mem[2,1,3,3,4,5,6,7]
7093 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
7094 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
7095 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7096 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7097 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
7098 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7099 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7100 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm2
7101 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7102 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
7103 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
7104 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm3 # 16-byte Reload
7105 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7106 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7107 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7108 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7109 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7110 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm3, %ymm3
7111 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
7112 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $246, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
7113 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = mem[2,1,3,3,4,5,6,7]
7114 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
7115 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
7116 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7117 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7118 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
7119 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7120 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7121 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm2
7122 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7123 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm0
7124 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
7125 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7126 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7127 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7128 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7129 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7130 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7131 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq %xmm2, %ymm2
7132 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
7133 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $246, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7134 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = mem[2,1,3,3,4,5,6,7]
7135 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
7136 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
7137 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7138 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm2, %xmm1
7139 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
7140 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
7141 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7142 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
7143 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7144 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm1, %ymm1
7145 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm14, %ymm6
7146 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm14, %ymm2
7147 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
7148 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm2 = ymm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm9[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7149 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm3 = ymm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm13[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7150 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7151 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7152 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
7153 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7]
7154 ; AVX2-FAST-PERLANE-NEXT: vpshuflw $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
7155 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = mem[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7156 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7157 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
7158 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
7159 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = mem[0,1,0,1]
7160 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7161 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7162 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7163 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7164 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7165 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7166 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm2, %ymm2
7167 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7168 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm3, %ymm3
7169 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7170 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7171 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm3 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7172 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, %ymm13
7173 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm5 = ymm15[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm15[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7174 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[8],ymm3[8],ymm5[9],ymm3[9],ymm5[10],ymm3[10],ymm5[11],ymm3[11]
7175 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7176 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
7177 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
7178 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
7179 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm3 = ymm14[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7180 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7181 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
7182 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7183 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm15, %ymm3
7184 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7185 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7186 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7187 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7188 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm2, %ymm2
7189 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7190 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm3, %ymm3
7191 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7192 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7193 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm3 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7194 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm5 = ymm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm8[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7195 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[8],ymm3[8],ymm5[9],ymm3[9],ymm5[10],ymm3[10],ymm5[11],ymm3[11]
7196 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7197 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
7198 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
7199 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
7200 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm3 = ymm11[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7201 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7202 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7]
7203 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7204 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7205 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7206 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
7207 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7208 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm7, %ymm2
7209 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
7210 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm10, %ymm0
7211 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[8],ymm2[8],ymm0[9],ymm2[9],ymm0[10],ymm2[10],ymm0[11],ymm2[11]
7212 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7213 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7214 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
7215 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} ymm3 = ymm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm7[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7216 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7217 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
7218 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
7219 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
7220 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7221 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm2 = ymm8[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7222 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7223 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
7224 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
7225 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm9, %ymm1
7226 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7227 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
7228 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7229 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7230 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
7231 ; AVX2-FAST-PERLANE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7232 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7233 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
7234 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7235 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
7236 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,2,3,3]
7237 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
7238 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
7239 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
7240 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7241 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm2, %xmm2
7242 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7243 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
7244 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
7245 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7246 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
7247 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,1,0,1]
7248 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,0,0]
7249 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm0
7250 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7251 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7252 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
7253 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7254 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7255 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
7256 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7257 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,1,1,1]
7258 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,2,3,3]
7259 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
7260 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
7261 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7262 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm4
7263 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7264 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3],ymm4[4],ymm3[5,6],ymm4[7]
7265 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7266 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm4
7267 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7268 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm0
7269 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7270 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7271 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
7272 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7273 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7274 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd (%rsp), %xmm0, %xmm4 # 16-byte Folded Reload
7275 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7276 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,1,1,1]
7277 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,2,3,3]
7278 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
7279 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
7280 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7281 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm4
7282 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7283 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3],ymm4[4],ymm3[5,6],ymm4[7]
7284 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7285 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm4
7286 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7287 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm0
7288 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7289 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7290 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
7291 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7292 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7293 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
7294 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7295 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,1,1,1]
7296 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,2,3,3]
7297 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
7298 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7]
7299 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7300 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm1
7301 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7302 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5,6],ymm1[7]
7303 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7304 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm2
7305 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7306 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm0
7307 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7308 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm1 # 32-byte Folded Reload
7309 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = ymm6[4],mem[4],ymm6[5],mem[5],ymm6[6],mem[6],ymm6[7],mem[7],ymm6[12],mem[12],ymm6[13],mem[13],ymm6[14],mem[14],ymm6[15],mem[15]
7310 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7311 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
7312 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
7313 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
7314 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[1,2,3,3,5,6,7,7]
7315 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7316 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
7317 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
7318 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7319 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm0, %ymm3
7320 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
7321 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm1[0],ymm3[1],ymm1[2,3],ymm3[4],ymm1[5,6],ymm3[7]
7322 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
7323 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7324 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm0, %ymm4
7325 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,2,3]
7326 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm4
7327 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7328 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
7329 ; AVX2-FAST-PERLANE-NEXT: # ymm3 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
7330 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm0 # 32-byte Folded Reload
7331 ; AVX2-FAST-PERLANE-NEXT: # ymm0 = ymm13[4],mem[4],ymm13[5],mem[5],ymm13[6],mem[6],ymm13[7],mem[7],ymm13[12],mem[12],ymm13[13],mem[13],ymm13[14],mem[14],ymm13[15],mem[15]
7332 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[3,3,3,3]
7333 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[1,2,3,3,5,6,7,7]
7334 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7335 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7]
7336 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm14, %ymm3
7337 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
7338 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3],ymm3[4],ymm0[5,6],ymm3[7]
7339 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm15, %ymm3
7340 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,2,3]
7341 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm3, %ymm3
7342 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7343 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7344 ; AVX2-FAST-PERLANE-NEXT: # ymm0 = ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15]
7345 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7346 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm1[4],ymm12[4],ymm1[5],ymm12[5],ymm1[6],ymm12[6],ymm1[7],ymm12[7],ymm1[12],ymm12[12],ymm1[13],ymm12[13],ymm1[14],ymm12[14],ymm1[15],ymm12[15]
7347 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,3]
7348 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
7349 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7350 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
7351 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm11, %ymm1
7352 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7353 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6],ymm1[7]
7354 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7355 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm1, %ymm1
7356 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7357 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm0
7358 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm1 # 32-byte Folded Reload
7359 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = ymm10[4],mem[4],ymm10[5],mem[5],ymm10[6],mem[6],ymm10[7],mem[7],ymm10[12],mem[12],ymm10[13],mem[13],ymm10[14],mem[14],ymm10[15],mem[15]
7360 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm11 # 32-byte Folded Reload
7361 ; AVX2-FAST-PERLANE-NEXT: # ymm11 = ymm7[4],mem[4],ymm7[5],mem[5],ymm7[6],mem[6],ymm7[7],mem[7],ymm7[12],mem[12],ymm7[13],mem[13],ymm7[14],mem[14],ymm7[15],mem[15]
7362 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
7363 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[1,2,3,3,5,6,7,7]
7364 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
7365 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1],ymm1[2],ymm11[3,4],ymm1[5],ymm11[6,7]
7366 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm8, %ymm6
7367 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,2,3]
7368 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm6[1],ymm1[2,3],ymm6[4],ymm1[5,6],ymm6[7]
7369 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm9, %ymm2
7370 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7371 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm1
7372 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
7373 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 736(%rax)
7374 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7375 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 704(%rax)
7376 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7377 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 672(%rax)
7378 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 544(%rax)
7379 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7380 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 512(%rax)
7381 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7382 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 480(%rax)
7383 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 352(%rax)
7384 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7385 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
7386 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7387 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
7388 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 160(%rax)
7389 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7390 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 128(%rax)
7391 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7392 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%rax)
7393 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7394 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 640(%rax)
7395 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7396 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 608(%rax)
7397 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7398 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 576(%rax)
7399 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7400 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 448(%rax)
7401 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7402 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 416(%rax)
7403 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7404 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
7405 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7406 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
7407 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7408 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
7409 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7410 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 192(%rax)
7411 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7412 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
7413 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7414 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
7415 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7416 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
7417 ; AVX2-FAST-PERLANE-NEXT: addq $1544, %rsp # imm = 0x608
7418 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
7419 ; AVX2-FAST-PERLANE-NEXT: retq
7420 ;
7421 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride6_vf64:
7422 ; AVX512F-ONLY-SLOW: # %bb.0:
7423 ; AVX512F-ONLY-SLOW-NEXT: subq $600, %rsp # imm = 0x258
7424 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rcx), %ymm9
7425 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm9[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7426 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdx), %ymm6
7427 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm6[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7428 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
7429 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
7430 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm6[4],ymm9[4],ymm6[5],ymm9[5],ymm6[6],ymm9[6],ymm6[7],ymm9[7],ymm6[12],ymm9[12],ymm6[13],ymm9[13],ymm6[14],ymm9[14],ymm6[15],ymm9[15]
7431 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
7432 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7433 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
7434 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rsi), %ymm10
7435 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm10[2,1,2,3,6,5,6,7]
7436 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7437 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdi), %ymm8
7438 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[2,1,2,3,6,5,6,7]
7439 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7440 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
7441 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7442 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm8[4],ymm10[4],ymm8[5],ymm10[5],ymm8[6],ymm10[6],ymm8[7],ymm10[7],ymm8[12],ymm10[12],ymm8[13],ymm10[13],ymm8[14],ymm10[14],ymm8[15],ymm10[15]
7443 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[3,3,3,3]
7444 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
7445 ; AVX512F-ONLY-SLOW-NEXT: movw $18724, %ax # imm = 0x4924
7446 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k1
7447 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
7448 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
7449 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r8), %ymm5
7450 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
7451 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm5, %ymm2
7452 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7453 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
7454 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
7455 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm5[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7456 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7457 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
7458 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
7459 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7460 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r9), %ymm1
7461 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7462 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm1[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
7463 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
7464 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7465 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7466 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm1[2,3,2,3,6,7,6,7]
7467 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
7468 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
7469 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7470 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %ymm2
7471 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7472 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %ymm3
7473 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7474 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
7475 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
7476 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
7477 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm3, %ymm21
7478 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm19
7479 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
7480 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7481 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
7482 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %ymm3
7483 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,1,2,3,6,5,6,7]
7484 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7485 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm4
7486 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,2,3,6,5,6,7]
7487 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7488 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
7489 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
7490 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
7491 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm4, %ymm16
7492 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm3, %ymm18
7493 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[3,3,3,3]
7494 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
7495 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
7496 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
7497 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm11
7498 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm11, %ymm2
7499 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7500 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6],ymm2[7]
7501 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
7502 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm11[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7503 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7504 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
7505 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
7506 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7507 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm1
7508 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7509 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm1[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
7510 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
7511 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7512 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7513 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm1[2,3,2,3,6,7,6,7]
7514 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
7515 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
7516 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7517 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %ymm13
7518 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm13[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7519 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %ymm12
7520 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7521 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
7522 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
7523 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15]
7524 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[1,2,3,3,5,6,7,7]
7525 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %ymm4
7526 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,2,3,6,5,6,7]
7527 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7528 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %ymm14
7529 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm14[2,1,2,3,6,5,6,7]
7530 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7531 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
7532 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,2,3]
7533 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm14[4],ymm4[4],ymm14[5],ymm4[5],ymm14[6],ymm4[6],ymm14[7],ymm4[7],ymm14[12],ymm4[12],ymm14[13],ymm4[13],ymm14[14],ymm4[14],ymm14[15],ymm4[15]
7534 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm14, %ymm23
7535 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm4, %ymm24
7536 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[3,3,3,3]
7537 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7538 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
7539 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm1
7540 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
7541 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %ymm3
7542 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm3, %ymm0
7543 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
7544 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm2
7545 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5,6],ymm0[7]
7546 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
7547 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7548 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm3, %ymm17
7549 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7550 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7]
7551 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
7552 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7553 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %ymm1
7554 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7555 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm1[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
7556 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
7557 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7558 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7559 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm1[2,3,2,3,6,7,6,7]
7560 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
7561 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm0[2,1,2,3]
7562 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm2
7563 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm0 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7564 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm1
7565 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} ymm3 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7566 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm0[0],ymm3[1],ymm0[1],ymm3[2],ymm0[2],ymm3[3],ymm0[3],ymm3[8],ymm0[8],ymm3[9],ymm0[9],ymm3[10],ymm0[10],ymm3[11],ymm0[11]
7567 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm0[2,2,2,2]
7568 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm1[4],ymm2[4],ymm1[5],ymm2[5],ymm1[6],ymm2[6],ymm1[7],ymm2[7],ymm1[12],ymm2[12],ymm1[13],ymm2[13],ymm1[14],ymm2[14],ymm1[15],ymm2[15]
7569 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm20 = ymm3[1,2,3,3,5,6,7,7]
7570 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm4
7571 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm4[2,1,2,3,6,5,6,7]
7572 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm3[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7573 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm3
7574 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm3[2,1,2,3,6,5,6,7]
7575 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
7576 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm15[0],ymm0[0],ymm15[1],ymm0[1],ymm15[2],ymm0[2],ymm15[3],ymm0[3],ymm15[8],ymm0[8],ymm15[9],ymm0[9],ymm15[10],ymm0[10],ymm15[11],ymm0[11]
7577 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
7578 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm15 = ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15]
7579 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[3,3,3,3]
7580 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
7581 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm20, %zmm25, %zmm20
7582 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm0, %zmm15
7583 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm20, %zmm15 {%k1}
7584 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm0
7585 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm0, %ymm7
7586 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,2,3]
7587 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm15, %ymm14
7588 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm14[0],ymm7[1],ymm14[2,3],ymm7[4],ymm14[5,6],ymm7[7]
7589 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm0, %zmm7
7590 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm0[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7591 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
7592 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0],ymm15[1,2],ymm14[3],ymm15[4,5],ymm14[6],ymm15[7]
7593 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm14[0,1,2,3],zmm7[4,5,6,7]
7594 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7595 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[1],ymm2[1],ymm1[2],ymm2[2],ymm1[3],ymm2[3],ymm1[8],ymm2[8],ymm1[9],ymm2[9],ymm1[10],ymm2[10],ymm1[11],ymm2[11]
7596 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = <17,18,17,18,u,u,19,19,5,4,2,2,5,4,6,6>
7597 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm7
7598 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm14
7599 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm14[4],xmm7[4],xmm14[5],xmm7[5],xmm14[6],xmm7[6],xmm14[7],xmm7[7]
7600 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm14, %xmm26
7601 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm2, %zmm29, %zmm1
7602 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm14
7603 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7604 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm2
7605 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7606 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm14[4],xmm2[5],xmm14[5],xmm2[6],xmm14[6],xmm2[7],xmm14[7]
7607 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,1,1,1]
7608 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11]
7609 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
7610 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
7611 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm2, %zmm1 {%k1}
7612 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7613 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
7614 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm2
7615 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2],ymm2[3,4],ymm0[5],ymm2[6,7]
7616 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
7617 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm2
7618 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7619 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
7620 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm3, %xmm2, %xmm2
7621 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm3, %xmm14
7622 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7623 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6],ymm2[7]
7624 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
7625 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7626 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[8],ymm9[8],ymm6[9],ymm9[9],ymm6[10],ymm9[10],ymm6[11],ymm9[11]
7627 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rcx), %xmm3
7628 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdx), %xmm4
7629 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
7630 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm0, %zmm29, %zmm6
7631 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rsi), %xmm1
7632 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdi), %xmm0
7633 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
7634 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,1,1,1]
7635 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm8[0],ymm10[0],ymm8[1],ymm10[1],ymm8[2],ymm10[2],ymm8[3],ymm10[3],ymm8[8],ymm10[8],ymm8[9],ymm10[9],ymm8[10],ymm10[10],ymm8[11],ymm10[11]
7636 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
7637 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm2, %zmm2
7638 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm2, %zmm6 {%k1}
7639 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm5[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7640 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
7641 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm6, %ymm5
7642 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7]
7643 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm5
7644 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r8), %xmm2
7645 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm2, %xmm8
7646 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
7647 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0],ymm8[1],ymm6[2,3],ymm8[4],ymm6[5,6],ymm8[7]
7648 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm27 = zmm6[0,1,2,3],zmm5[4,5,6,7]
7649 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7650 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7651 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm4 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7652 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7653 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [1,0,2,2,1,0,2,2,16,17,16,17,16,17,16,17]
7654 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm3, %zmm31, %zmm5
7655 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[0,1,2,1]
7656 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
7657 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[0,1,2,1]
7658 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,7,6,5]
7659 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
7660 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
7661 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
7662 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm3[0,1,0,1]
7663 ; AVX512F-ONLY-SLOW-NEXT: movw $9362, %ax # imm = 0x2492
7664 ; AVX512F-ONLY-SLOW-NEXT: kmovw %eax, %k2
7665 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm5, %zmm0 {%k2}
7666 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[2,1,3,3,4,5,6,7]
7667 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
7668 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm3
7669 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm3[1,2],ymm1[3],ymm3[4,5],ymm1[6],ymm3[7]
7670 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
7671 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
7672 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
7673 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
7674 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm28 = zmm0[0,1,2,3],zmm1[4,5,6,7]
7675 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
7676 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
7677 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7678 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7679 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
7680 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7681 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm0, %zmm31, %zmm5
7682 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %xmm1
7683 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,1,2,1]
7684 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,7,6,5]
7685 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %xmm4
7686 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,1,2,1]
7687 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,7,6,5]
7688 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
7689 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
7690 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
7691 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm6[0,1,2,3],zmm0[0,1,0,1]
7692 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm5, %zmm6 {%k2}
7693 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %xmm0
7694 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[2,1,3,3,4,5,6,7]
7695 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
7696 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm6, %ymm8
7697 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0],ymm8[1,2],ymm5[3],ymm8[4,5],ymm5[6],ymm8[7]
7698 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm5
7699 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm8 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
7700 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm8, %ymm8
7701 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1],ymm8[2],ymm6[3,4],ymm8[5],ymm6[6,7]
7702 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm30 = zmm6[0,1,2,3],zmm5[4,5,6,7]
7703 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
7704 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm21, %ymm3
7705 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm19, %ymm5
7706 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[8],ymm5[8],ymm3[9],ymm5[9],ymm3[10],ymm5[10],ymm3[11],ymm5[11]
7707 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm2, %zmm29, %zmm3
7708 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
7709 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,1,1,1]
7710 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm16, %ymm2
7711 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm18, %ymm4
7712 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11]
7713 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7714 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
7715 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm1, %zmm3 {%k1}
7716 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm11[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7717 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7718 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm3, %ymm2
7719 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7]
7720 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm14, %xmm4
7721 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm0, %xmm0
7722 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7723 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6],ymm0[7]
7724 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
7725 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm25 = zmm0[0,1,2,3],zmm1[4,5,6,7]
7726 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm0
7727 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11]
7728 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm0[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
7729 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
7730 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm1[2,2,2,3]
7731 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,2,3,6,7,6,7]
7732 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
7733 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm1[2,1,2,3]
7734 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm11
7735 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[2,3,2,3]
7736 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,1,4,5,6,7]
7737 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,1,0,1]
7738 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7739 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm0[2,2,2,2]
7740 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm0
7741 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm13
7742 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm13[4],xmm0[4],xmm13[5],xmm0[5],xmm13[6],xmm0[6],xmm13[7],xmm0[7]
7743 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm1, %zmm29, %zmm6
7744 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm15
7745 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm1
7746 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm1[4],xmm15[4],xmm1[5],xmm15[5],xmm1[6],xmm15[6],xmm1[7],xmm15[7]
7747 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[1,1,1,1]
7748 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm23, %ymm2
7749 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm24, %ymm3
7750 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm9 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11]
7751 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
7752 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
7753 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm8, %zmm6 {%k1}
7754 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm17, %ymm2
7755 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm2[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
7756 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,2]
7757 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm6, %ymm9
7758 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm9[0,1],ymm8[2],ymm9[3,4],ymm8[5],ymm9[6,7]
7759 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %xmm8
7760 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm4, %xmm8, %xmm9
7761 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
7762 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm6[0],ymm9[1],ymm6[2,3],ymm9[4],ymm6[5,6],ymm9[7]
7763 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7764 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm6 # 32-byte Folded Reload
7765 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm29 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
7766 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm6 # 64-byte Folded Reload
7767 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7768 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm23 # 32-byte Folded Reload
7769 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm23 # 64-byte Folded Reload
7770 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
7771 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm22, %zmm2, %zmm12
7772 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm0, %zmm14
7773 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm9[0,1,2,3],zmm14[4,5,6,7]
7774 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
7775 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7776 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm13 = xmm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7777 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
7778 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm0, %zmm31, %zmm9
7779 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm15[0,1,2,1]
7780 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,7,6,5]
7781 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm1[0,1,2,1]
7782 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,7,6,5]
7783 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm13[4],xmm0[4],xmm13[5],xmm0[5],xmm13[6],xmm0[6],xmm13[7],xmm0[7]
7784 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r9), %xmm13
7785 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm15[0],xmm1[1],xmm15[1],xmm1[2],xmm15[2],xmm1[3],xmm15[3]
7786 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[2,3,2,3]
7787 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,2,2,1,4,5,6,7]
7788 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm14[0,1,0,1]
7789 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
7790 ; AVX512F-ONLY-SLOW-NEXT: # ymm14 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7791 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm14[2,2,2,2]
7792 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
7793 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[0,1,0,1]
7794 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm9, %zmm0 {%k2}
7795 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm1
7796 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[2,1,3,3,4,5,6,7]
7797 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
7798 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0],ymm1[1,2],ymm9[3],ymm1[4,5],ymm9[6],ymm1[7]
7799 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm13[0,0,2,1,4,5,6,7]
7800 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm9, %ymm18
7801 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm13[0,2,2,3,4,5,6,7]
7802 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
7803 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm9[0,0,2,1]
7804 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
7805 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm8 = xmm8[0],zero,xmm8[1],zero,xmm8[2],zero,xmm8[3],zero
7806 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm8, %ymm8
7807 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm8[2],ymm0[3,4],ymm8[5],ymm0[6,7]
7808 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %xmm8
7809 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm21 = zmm0[0,1,2,3],zmm1[4,5,6,7]
7810 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm26, %xmm2
7811 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
7812 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7813 ; AVX512F-ONLY-SLOW-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7814 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
7815 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm8[0,0,2,1,4,5,6,7]
7816 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm7, %ymm7
7817 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm1, %zmm31, %zmm0
7818 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7819 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,1,2,1]
7820 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
7821 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7822 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm2[0,1,2,1]
7823 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,7,6,5]
7824 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm9[4],xmm1[4],xmm9[5],xmm1[5],xmm9[6],xmm1[6],xmm9[7],xmm1[7]
7825 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[0,2,2,3,4,5,6,7]
7826 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
7827 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
7828 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,3,2,3]
7829 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,2,2,1,4,5,6,7]
7830 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
7831 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
7832 ; AVX512F-ONLY-SLOW-NEXT: # ymm13 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7833 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,2]
7834 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
7835 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %xmm15
7836 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
7837 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm14[0,1,2,3],zmm1[0,1,0,1]
7838 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k2}
7839 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
7840 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7841 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm3[2,1,3,3,4,5,6,7]
7842 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
7843 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0],ymm0[1,2],ymm14[3],ymm0[4,5],ymm14[6],ymm0[7]
7844 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm15[2,3,2,3]
7845 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,2,2,1,4,5,6,7]
7846 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,1,0,1]
7847 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
7848 ; AVX512F-ONLY-SLOW-NEXT: # ymm2 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
7849 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
7850 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm26 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
7851 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm26, %ymm3
7852 ; AVX512F-ONLY-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7]
7853 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm15[0,0,2,1,4,5,6,7]
7854 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
7855 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[0,2,2,3,4,5,6,7]
7856 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,4,4,4]
7857 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,2,1]
7858 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
7859 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
7860 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm11[0,0,2,1,4,5,6,7]
7861 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
7862 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,2,2,3,4,5,6,7]
7863 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,4,4,4]
7864 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
7865 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm12 # 64-byte Folded Reload
7866 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm20, %zmm4
7867 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm4 # 64-byte Folded Reload
7868 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm5, %zmm5
7869 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
7870 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm5 # 64-byte Folded Reload
7871 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm24, %zmm16
7872 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm27, %zmm10, %zmm16
7873 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm8, %zmm8
7874 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm25, %zmm10, %zmm8
7875 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm14, %zmm2
7876 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm22, %zmm10, %zmm2
7877 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm18, %zmm10
7878 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
7879 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm28, %zmm13, %zmm10
7880 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm7
7881 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm30, %zmm13, %zmm7
7882 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm3, %zmm3
7883 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm21, %zmm13, %zmm3
7884 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm1, %zmm1
7885 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm13, %zmm1
7886 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7887 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, (%rax)
7888 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 192(%rax)
7889 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 256(%rax)
7890 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 448(%rax)
7891 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 384(%rax)
7892 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, 576(%rax)
7893 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 640(%rax)
7894 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 64(%rax)
7895 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 128(%rax)
7896 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 320(%rax)
7897 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 512(%rax)
7898 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm6, 704(%rax)
7899 ; AVX512F-ONLY-SLOW-NEXT: addq $600, %rsp # imm = 0x258
7900 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
7901 ; AVX512F-ONLY-SLOW-NEXT: retq
7903 ; AVX512F-ONLY-FAST-LABEL: store_i16_stride6_vf64:
7904 ; AVX512F-ONLY-FAST: # %bb.0:
7905 ; AVX512F-ONLY-FAST-NEXT: subq $1256, %rsp # imm = 0x4E8
7906 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rcx), %ymm3
7907 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdx), %ymm4
7908 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %ymm2
7909 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %ymm5
7910 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rcx), %xmm9
7911 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdx), %xmm11
7912 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm8
7913 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7914 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %xmm12
7915 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7916 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %xmm6
7917 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm7
7918 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm7, (%rsp) # 16-byte Spill
7919 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %xmm13
7920 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7921 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm10
7922 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm1
7923 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7924 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm0
7925 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7926 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
7927 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
7928 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7929 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7930 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
7931 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
7932 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7933 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7934 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm5[0],ymm2[0],ymm5[1],ymm2[1],ymm5[2],ymm2[2],ymm5[3],ymm2[3],ymm5[8],ymm2[8],ymm5[9],ymm2[9],ymm5[10],ymm2[10],ymm5[11],ymm2[11]
7935 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm10[4],xmm6[4],xmm10[5],xmm6[5],xmm10[6],xmm6[6],xmm10[7],xmm6[7]
7936 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm10, %xmm19
7937 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm6, %xmm23
7938 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7939 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7940 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %ymm7
7941 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %ymm10
7942 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm10[0],ymm7[0],ymm10[1],ymm7[1],ymm10[2],ymm7[2],ymm10[3],ymm7[3],ymm10[8],ymm7[8],ymm10[9],ymm7[9],ymm10[10],ymm7[10],ymm10[11],ymm7[11]
7943 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
7944 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7945 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7946 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rsi), %ymm12
7947 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7948 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
7949 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm12, %ymm0
7950 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdi), %ymm1
7951 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7952 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm8
7953 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm8[0],ymm0[0],ymm8[1],ymm0[1],ymm8[2],ymm0[2],ymm8[3],ymm0[3],ymm8[8],ymm0[8],ymm8[9],ymm0[9],ymm8[10],ymm0[10],ymm8[11],ymm0[11]
7954 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = [2,1,2,3,11,11,11,11]
7955 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm1[4],ymm12[4],ymm1[5],ymm12[5],ymm1[6],ymm12[6],ymm1[7],ymm12[7],ymm1[12],ymm12[12],ymm1[13],ymm12[13],ymm1[14],ymm12[14],ymm1[15],ymm12[15]
7956 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm8, %zmm27, %zmm0
7957 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm26 = [5,6,5,6,5,6,7,7]
7958 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
7959 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm8, %ymm26, %ymm8
7960 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7961 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
7962 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
7963 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
7964 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm3, %zmm3
7965 ; AVX512F-ONLY-FAST-NEXT: movw $18724, %ax # imm = 0x4924
7966 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k1
7967 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm0 {%k1}
7968 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm25 = [8,21,10,11,20,13,14,23]
7969 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
7970 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r8), %ymm1
7971 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7972 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
7973 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm13, %ymm1, %ymm4
7974 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm13, %ymm8
7975 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm4, %zmm25, %zmm3
7976 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm29 = [12,1,2,13,4,5,14,7]
7977 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
7978 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm29, %ymm0
7979 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm3[0,1,2,3]
7980 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7981 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = <2,2,u,3,10,u,10,11>
7982 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r9), %ymm1
7983 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7984 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
7985 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm1, %ymm0
7986 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
7987 ; AVX512F-ONLY-FAST-NEXT: # ymm14 = mem[0,1,0,1]
7988 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm3
7989 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm28, %zmm3
7990 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7991 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %ymm1
7992 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7993 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm0
7994 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm4
7995 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7996 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm3
7997 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm3[0],ymm0[0],ymm3[1],ymm0[1],ymm3[2],ymm0[2],ymm3[3],ymm0[3],ymm3[8],ymm0[8],ymm3[9],ymm0[9],ymm3[10],ymm0[10],ymm3[11],ymm0[11]
7998 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm4[4],ymm1[4],ymm4[5],ymm1[5],ymm4[6],ymm1[6],ymm4[7],ymm1[7],ymm4[12],ymm1[12],ymm4[13],ymm1[13],ymm4[14],ymm1[14],ymm4[15],ymm1[15]
7999 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm0
8000 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm5[4],ymm2[4],ymm5[5],ymm2[5],ymm5[6],ymm2[6],ymm5[7],ymm2[7],ymm5[12],ymm2[12],ymm5[13],ymm2[13],ymm5[14],ymm2[14],ymm5[15],ymm2[15]
8001 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm3, %ymm26, %ymm3
8002 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8003 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm5 = ymm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm5[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8004 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
8005 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
8006 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
8007 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm0 {%k1}
8008 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, %zmm3
8009 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r8), %ymm1
8010 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8011 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm13, %ymm1, %ymm4
8012 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm4, %zmm25, %zmm3
8013 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8014 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm29, %ymm0
8015 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm3[0,1,2,3]
8016 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8017 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm1
8018 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8019 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm1, %ymm0
8020 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm3
8021 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm28, %zmm3
8022 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8023 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rsi), %xmm0
8024 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8025 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
8026 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm3
8027 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdi), %xmm1
8028 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8029 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm1, %xmm4
8030 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
8031 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = [0,0,2,1,8,9,8,9]
8032 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8033 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm4
8034 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm31 = [1,0,2,2,1,0,2,2]
8035 ; AVX512F-ONLY-FAST-NEXT: # ymm31 = mem[0,1,2,3,0,1,2,3]
8036 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
8037 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm3, %ymm31, %ymm3
8038 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm2 = xmm9[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8039 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8040 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm2[0],xmm5[1],xmm2[1],xmm5[2],xmm2[2],xmm5[3],xmm2[3]
8041 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm2, %ymm2
8042 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
8043 ; AVX512F-ONLY-FAST-NEXT: movw $9362, %ax # imm = 0x2492
8044 ; AVX512F-ONLY-FAST-NEXT: kmovw %eax, %k2
8045 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm2, %zmm4 {%k2}
8046 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm30 = [16,9,10,17,12,13,18,15]
8047 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, %zmm2
8048 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r8), %xmm0
8049 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8050 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm3 = xmm0[2,1,3,3,4,5,6,7]
8051 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm3, %zmm30, %zmm2
8052 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
8053 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm16 = [0,1,8,3,4,9,6,7]
8054 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm3, %ymm16, %ymm4
8055 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm4[0,1,2,3],zmm2[0,1,2,3]
8056 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8057 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = <u,0,0,u,8,8,u,9>
8058 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r9), %xmm0
8059 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8060 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
8061 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm2
8062 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,0,2,1,4,5,6,7]
8063 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm17, %zmm0
8064 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8065 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %xmm0
8066 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm2
8067 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %xmm1
8068 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8069 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm1, %xmm3
8070 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
8071 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8072 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm0, %xmm20
8073 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm18, %zmm3
8074 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm23, %xmm0
8075 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm19, %xmm1
8076 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8077 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm2, %ymm31, %ymm2
8078 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm4 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8079 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8080 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8081 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm4, %ymm4
8082 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm2
8083 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm2, %zmm3 {%k2}
8084 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm2
8085 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r8), %xmm0
8086 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm0[2,1,3,3,4,5,6,7]
8087 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm4, %zmm30, %zmm2
8088 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm4 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
8089 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm0, %xmm24
8090 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm16, %ymm3
8091 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm3[0,1,2,3],zmm2[0,1,2,3]
8092 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8093 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %xmm0
8094 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8095 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm2
8096 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,0,2,1,4,5,6,7]
8097 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm17, %zmm0
8098 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8099 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %ymm0
8100 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm2
8101 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %ymm1
8102 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm3
8103 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
8104 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15]
8105 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm22
8106 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm23
8107 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm2
8108 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm10[4],ymm7[4],ymm10[5],ymm7[5],ymm10[6],ymm7[6],ymm10[7],ymm7[7],ymm10[12],ymm7[12],ymm10[13],ymm7[13],ymm10[14],ymm7[14],ymm10[15],ymm7[15]
8109 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm3, %ymm26, %ymm3
8110 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm7[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8111 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm5 = ymm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm10[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8112 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[8],ymm4[8],ymm5[9],ymm4[9],ymm5[10],ymm4[10],ymm5[11],ymm4[11]
8113 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
8114 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
8115 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm2 {%k1}
8116 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm3
8117 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %ymm0
8118 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm8, %ymm5
8119 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm4
8120 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm4, %zmm25, %zmm3
8121 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm0[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8122 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm19
8123 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm29, %ymm2
8124 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,2,3],zmm3[0,1,2,3]
8125 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8126 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm9
8127 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm9, %ymm3
8128 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm8
8129 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm8, %ymm1
8130 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[8],ymm3[8],ymm1[9],ymm3[9],ymm1[10],ymm3[10],ymm1[11],ymm3[11]
8131 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm8[4],ymm9[4],ymm8[5],ymm9[5],ymm8[6],ymm9[6],ymm8[7],ymm9[7],ymm8[12],ymm9[12],ymm8[13],ymm9[13],ymm8[14],ymm9[14],ymm8[15],ymm9[15]
8132 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm27, %zmm1
8133 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %ymm2
8134 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm2, %ymm3
8135 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm2, %ymm0
8136 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm21
8137 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm28, %zmm0
8138 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8139 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8140 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
8141 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[12],ymm0[12],ymm2[13],ymm0[13],ymm2[14],ymm0[14],ymm2[15],ymm0[15]
8142 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm3, %ymm26, %ymm3
8143 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm0[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8144 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} ymm6 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8145 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm6[0],ymm4[0],ymm6[1],ymm4[1],ymm6[2],ymm4[2],ymm6[3],ymm4[3],ymm6[8],ymm4[8],ymm6[9],ymm4[9],ymm6[10],ymm4[10],ymm6[11],ymm4[11]
8146 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,2]
8147 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
8148 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm3, %zmm1 {%k1}
8149 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm3
8150 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm3, %ymm4
8151 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm4, %zmm1, %zmm25
8152 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8153 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm4, %ymm29, %ymm1
8154 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm25 = zmm1[0,1,2,3],zmm25[0,1,2,3]
8155 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm5
8156 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm5, %ymm1
8157 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm10
8158 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm28, %zmm10
8159 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %xmm0
8160 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
8161 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm1
8162 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm11, %xmm6
8163 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm6[4],xmm1[4],xmm6[5],xmm1[5],xmm6[6],xmm1[6],xmm6[7],xmm1[7]
8164 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3]
8165 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm0, %xmm29
8166 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm7
8167 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8168 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8169 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
8170 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm1, %ymm31, %ymm1
8171 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm6 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8172 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm15 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8173 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm15[0],xmm6[0],xmm15[1],xmm6[1],xmm15[2],xmm6[2],xmm15[3],xmm6[3]
8174 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm6, %ymm6
8175 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm1
8176 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm1, %zmm7 {%k2}
8177 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, %zmm1
8178 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %xmm6
8179 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm15 = xmm6[2,1,3,3,4,5,6,7]
8180 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm15, %zmm30, %zmm1
8181 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm15 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
8182 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm15, %ymm16, %ymm7
8183 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm26 = zmm7[0,1,2,3],zmm1[0,1,2,3]
8184 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm15
8185 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm0
8186 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm15, %xmm1
8187 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm7
8188 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
8189 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3]
8190 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm12
8191 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %xmm7
8192 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm1
8193 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm7[0,0,2,1,4,5,6,7]
8194 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm17, %zmm4
8195 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
8196 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8197 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm14[0],xmm2[1],xmm14[1],xmm2[2],xmm14[2],xmm2[3],xmm14[3]
8198 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm1, %ymm31, %ymm18
8199 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm1 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8200 ; AVX512F-ONLY-FAST-NEXT: vpsrldq {{.*#+}} xmm14 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8201 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm1[0],xmm14[1],xmm1[1],xmm14[2],xmm1[2],xmm14[3],xmm1[3]
8202 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq %xmm1, %ymm1
8203 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm18, %zmm1
8204 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm1, %zmm12 {%k2}
8205 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm14
8206 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm14[2,1,3,3,4,5,6,7]
8207 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm1, %zmm12, %zmm30
8208 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm14[0],zero,xmm14[1],zero,xmm14[2],zero,xmm14[3],zero
8209 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm1, %ymm16, %ymm12
8210 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm12[0,1,2,3],zmm30[0,1,2,3]
8211 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm1
8212 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm1, %xmm2
8213 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm12 = xmm1[0,0,2,1,4,5,6,7]
8214 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm17, %zmm12
8215 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[2],ymm9[2],ymm8[3],ymm9[3],ymm8[8],ymm9[8],ymm8[9],ymm9[9],ymm8[10],ymm9[10],ymm8[11],ymm9[11]
8216 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
8217 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [1,1,1,1,10,10,10,11]
8218 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm9, %zmm0
8219 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = <1,2,1,2,u,u,3,3,13,12,10,10,13,12,14,14>
8220 ; AVX512F-ONLY-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm2 # 64-byte Folded Reload
8221 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
8222 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm3[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8223 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm18 = [8,9,20,11,12,21,14,15]
8224 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, %zmm17
8225 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm0, %zmm18, %zmm17
8226 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
8227 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm14, %xmm0
8228 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [0,9,2,3,8,5,6,11]
8229 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm0, %ymm14, %ymm2
8230 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm5 = ymm5[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8231 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
8232 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm1, %xmm3
8233 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm0, %xmm30
8234 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = <0,u,0,1,u,10,10,u>
8235 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm27, %zmm3
8236 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8237 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
8238 ; AVX512F-ONLY-FAST-NEXT: # ymm5 = ymm0[0],mem[0],ymm0[1],mem[1],ymm0[2],mem[2],ymm0[3],mem[3],ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11]
8239 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8240 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
8241 ; AVX512F-ONLY-FAST-NEXT: # xmm1 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8242 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm9, %zmm1
8243 ; AVX512F-ONLY-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm5 # 64-byte Folded Reload
8244 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm1, %zmm5 {%k1}
8245 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Folded Reload
8246 ; AVX512F-ONLY-FAST-NEXT: # ymm1 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8247 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm28
8248 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm1, %zmm18, %zmm28
8249 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8250 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm1
8251 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm1, %ymm14, %ymm5
8252 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8253 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
8254 ; AVX512F-ONLY-FAST-NEXT: # ymm1 = ymm0[0],mem[0],ymm0[1],mem[1],ymm0[2],mem[2],ymm0[3],mem[3],ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11]
8255 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm20, %xmm13
8256 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8257 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm13[4],xmm0[5],xmm13[5],xmm0[6],xmm13[6],xmm0[7],xmm13[7]
8258 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm9, %zmm0
8259 ; AVX512F-ONLY-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm1 # 64-byte Folded Reload
8260 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
8261 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
8262 ; AVX512F-ONLY-FAST-NEXT: # ymm0 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8263 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, %zmm20
8264 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %zmm0, %zmm18, %zmm20
8265 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm0
8266 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm0
8267 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm0, %ymm14, %ymm1
8268 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm22, %ymm0
8269 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm23, %ymm13
8270 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm13[0],ymm0[1],ymm13[1],ymm0[2],ymm13[2],ymm0[3],ymm13[3],ymm0[8],ymm13[8],ymm0[9],ymm13[9],ymm0[10],ymm13[10],ymm0[11],ymm13[11]
8271 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm29, %xmm13
8272 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm11[4],xmm13[4],xmm11[5],xmm13[5],xmm11[6],xmm13[6],xmm11[7],xmm13[7]
8273 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm11
8274 ; AVX512F-ONLY-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm0 # 64-byte Folded Reload
8275 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm11, %zmm0 {%k1}
8276 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm19, %ymm9
8277 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8278 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm9, %zmm0, %zmm18
8279 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm6, %xmm6
8280 ; AVX512F-ONLY-FAST-NEXT: vpermt2d %ymm6, %ymm14, %ymm0
8281 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
8282 ; AVX512F-ONLY-FAST-NEXT: # ymm6 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8283 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8284 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm30, %xmm11
8285 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm11, %xmm8, %xmm8
8286 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm27, %zmm8
8287 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
8288 ; AVX512F-ONLY-FAST-NEXT: # ymm6 = mem[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8289 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8290 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm11, %xmm9, %xmm9
8291 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm27, %zmm9
8292 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm11, %xmm7, %xmm6
8293 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm21, %ymm7
8294 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm7 = ymm7[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8295 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm27, %zmm6
8296 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm18[0,1,2,3]
8297 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
8298 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm7, %zmm6
8299 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8300 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 256(%rax)
8301 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm20[0,1,2,3]
8302 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm7, %zmm9
8303 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm9, 448(%rax)
8304 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm5[0,1,2,3],zmm28[0,1,2,3]
8305 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm7, %zmm8
8306 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm8, 640(%rax)
8307 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,2,3],zmm17[0,1,2,3]
8308 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm7, %zmm3
8309 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 64(%rax)
8310 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
8311 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm16, %zmm0, %zmm12
8312 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, (%rax)
8313 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm26, %zmm0, %zmm4
8314 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 192(%rax)
8315 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
8316 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm25, %zmm1, %zmm10
8317 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 128(%rax)
8318 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8319 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm2 # 64-byte Folded Reload
8320 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 320(%rax)
8321 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8322 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 64-byte Folded Reload
8323 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 384(%rax)
8324 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8325 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 64-byte Folded Reload
8326 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 576(%rax)
8327 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8328 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm0 # 64-byte Folded Reload
8329 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 512(%rax)
8330 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8331 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm0 # 64-byte Folded Reload
8332 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 704(%rax)
8333 ; AVX512F-ONLY-FAST-NEXT: addq $1256, %rsp # imm = 0x4E8
8334 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
8335 ; AVX512F-ONLY-FAST-NEXT: retq
;
8337 ; AVX512DQ-SLOW-LABEL: store_i16_stride6_vf64:
8338 ; AVX512DQ-SLOW: # %bb.0:
8339 ; AVX512DQ-SLOW-NEXT: subq $936, %rsp # imm = 0x3A8
8340 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rcx), %xmm0
8341 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8342 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdx), %xmm2
8343 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8344 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
8345 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8346 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8347 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
8348 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [1,0,2,2,1,0,2,2,16,17,16,17,16,17,16,17]
8349 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm2, %zmm0, %zmm1
8350 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rsi), %xmm4
8351 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, (%rsp) # 16-byte Spill
8352 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,1,2,1]
8353 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
8354 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdi), %xmm5
8355 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8356 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,1,2,1]
8357 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
8358 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
8359 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8360 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
8361 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm3[0,1,2,3],zmm2[0,1,0,1]
8362 ; AVX512DQ-SLOW-NEXT: movw $9362, %ax # imm = 0x2492
8363 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
8364 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm1, %zmm2 {%k1}
8365 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm2, %ymm1
8366 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r8), %xmm4
8367 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8368 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[2,1,3,3,4,5,6,7]
8369 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
8370 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0],ymm1[1,2],ymm3[3],ymm1[4,5],ymm3[6],ymm1[7]
8371 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8372 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
8373 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
8374 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
8375 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm1[4,5,6,7]
8376 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8377 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r9), %xmm2
8378 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8379 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,0,2,1,4,5,6,7]
8380 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
8381 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8382 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,2,2,3,4,5,6,7]
8383 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
8384 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
8385 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8386 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm4
8387 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
8388 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8389 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm6
8390 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
8391 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8392 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8393 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8394 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm3 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8395 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8396 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm2, %zmm0, %zmm1
8397 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %xmm5
8398 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8399 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[0,1,2,1]
8400 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,5]
8401 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %xmm7
8402 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8403 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[0,1,2,1]
8404 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
8405 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
8406 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
8407 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
8408 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm3[0,1,2,3],zmm2[0,1,0,1]
8409 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm1, %zmm2 {%k1}
8410 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm2, %ymm1
8411 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %xmm5
8412 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8413 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm5[2,1,3,3,4,5,6,7]
8414 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
8415 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0],ymm1[1,2],ymm3[3],ymm1[4,5],ymm3[6],ymm1[7]
8416 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8417 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
8418 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
8419 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7]
8420 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm1[4,5,6,7]
8421 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8422 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %xmm2
8423 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8424 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,0,2,1,4,5,6,7]
8425 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
8426 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8427 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,2,2,3,4,5,6,7]
8428 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
8429 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
8430 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8431 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8432 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8433 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8434 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8435 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8436 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
8437 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm0, %zmm2
8438 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm4
8439 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8440 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[0,1,2,1]
8441 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
8442 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm5
8443 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8444 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,1,2,1]
8445 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,5]
8446 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
8447 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8448 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
8449 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm3[0,1,2,3],zmm1[0,1,0,1]
8450 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm2, %zmm1 {%k1}
8451 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %xmm4
8452 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8453 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[2,1,3,3,4,5,6,7]
8454 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
8455 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm3
8456 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7]
8457 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm2
8458 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
8459 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm3, %ymm3
8460 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7]
8461 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm1[0,1,2,3],zmm2[4,5,6,7]
8462 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8463 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %xmm2
8464 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8465 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,0,2,1,4,5,6,7]
8466 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm1, %ymm1
8467 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8468 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,2,2,3,4,5,6,7]
8469 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
8470 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
8471 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8472 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm3
8473 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm4
8474 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8475 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} xmm2 = xmm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8476 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8477 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
8478 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm31
8479 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm3, %xmm19
8480 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm0, %zmm2
8481 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm3
8482 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm3[0,1,2,1]
8483 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,7,6,5]
8484 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm15
8485 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm15[0,1,2,1]
8486 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,6,5]
8487 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
8488 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm15[0],xmm3[0],xmm15[1],xmm3[1],xmm15[2],xmm3[2],xmm15[3],xmm3[3]
8489 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm3, %xmm18
8490 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
8491 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[0,1,0,1]
8492 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm2, %zmm0 {%k1}
8493 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm3
8494 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[2,1,3,3,4,5,6,7]
8495 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
8496 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm2
8497 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7]
8498 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8499 ; AVX512DQ-SLOW-NEXT: vpmovzxwd {{.*#+}} xmm2 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
8500 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm3, %xmm26
8501 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm2, %ymm2
8502 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2],ymm0[3,4],ymm2[5],ymm0[6,7]
8503 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8504 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8505 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rsi), %ymm2
8506 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[2,1,2,3,6,5,6,7]
8507 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8508 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdi), %ymm3
8509 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,1,2,3,6,5,6,7]
8510 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8511 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
8512 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
8513 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
8514 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm3, %ymm28
8515 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm27
8516 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
8517 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rcx), %ymm4
8518 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8519 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdx), %ymm11
8520 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm3 = ymm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm11[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8521 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
8522 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
8523 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm11[4],ymm4[4],ymm11[5],ymm4[5],ymm11[6],ymm4[6],ymm11[7],ymm4[7],ymm11[12],ymm4[12],ymm11[13],ymm4[13],ymm11[14],ymm4[14],ymm11[15],ymm4[15]
8524 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm4, %ymm21
8525 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[1,2,3,3,5,6,7,7]
8526 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8527 ; AVX512DQ-SLOW-NEXT: movw $18724, %ax # imm = 0x4924
8528 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
8529 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
8530 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm0 {%k1}
8531 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r8), %ymm9
8532 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
8533 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm9, %ymm1
8534 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
8535 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm2
8536 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
8537 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8538 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm9[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8539 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
8540 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
8541 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8542 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8543 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %ymm2
8544 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[2,1,2,3,6,5,6,7]
8545 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8546 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
8547 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,1,2,3,6,5,6,7]
8548 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8549 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
8550 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
8551 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[12],ymm2[12],ymm3[13],ymm2[13],ymm3[14],ymm2[14],ymm3[15],ymm2[15]
8552 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm3, %ymm22
8553 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm24
8554 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
8555 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %ymm12
8556 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm12[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm12[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8557 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %ymm10
8558 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm3 = ymm10[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm10[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8559 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
8560 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
8561 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm10[4],ymm12[4],ymm10[5],ymm12[5],ymm10[6],ymm12[6],ymm10[7],ymm12[7],ymm10[12],ymm12[12],ymm10[13],ymm12[13],ymm10[14],ymm12[14],ymm10[15],ymm12[15]
8562 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[1,2,3,3,5,6,7,7]
8563 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8564 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
8565 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm0 {%k1}
8566 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %ymm3
8567 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8568 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm3, %ymm1
8569 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
8570 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm2
8571 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
8572 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8573 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8574 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
8575 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
8576 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8577 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8578 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %ymm2
8579 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[2,1,2,3,6,5,6,7]
8580 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8581 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %ymm14
8582 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm14[2,1,2,3,6,5,6,7]
8583 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8584 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
8585 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,2,3]
8586 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm1 = ymm14[4],ymm2[4],ymm14[5],ymm2[5],ymm14[6],ymm2[6],ymm14[7],ymm2[7],ymm14[12],ymm2[12],ymm14[13],ymm2[13],ymm14[14],ymm2[14],ymm14[15],ymm2[15]
8587 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm20
8588 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
8589 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %ymm7
8590 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm2 = ymm7[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm7[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8591 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %ymm6
8592 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm3 = ymm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm6[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8593 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[8],ymm2[8],ymm3[9],ymm2[9],ymm3[10],ymm2[10],ymm3[11],ymm2[11]
8594 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
8595 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm3 = ymm6[4],ymm7[4],ymm6[5],ymm7[5],ymm6[6],ymm7[6],ymm6[7],ymm7[7],ymm6[12],ymm7[12],ymm6[13],ymm7[13],ymm6[14],ymm7[14],ymm6[15],ymm7[15]
8596 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[1,2,3,3,5,6,7,7]
8597 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8598 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
8599 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm0 {%k1}
8600 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %ymm3
8601 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm3, %ymm1
8602 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,2,3]
8603 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm2
8604 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6],ymm1[7]
8605 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8606 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm3[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8607 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm3, %ymm25
8608 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
8609 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7]
8610 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8611 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8612 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm5
8613 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm5[2,1,2,3,6,5,6,7]
8614 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8615 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm3
8616 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,1,2,3,6,5,6,7]
8617 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
8618 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
8619 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm0[2,1,2,3]
8620 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm3[4],ymm5[4],ymm3[5],ymm5[5],ymm3[6],ymm5[6],ymm3[7],ymm5[7],ymm3[12],ymm5[12],ymm3[13],ymm5[13],ymm3[14],ymm5[14],ymm3[15],ymm5[15]
8621 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm0[3,3,3,3]
8622 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm4
8623 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm1 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8624 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm2
8625 ; AVX512DQ-SLOW-NEXT: vpsrldq {{.*#+}} ymm8 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
8626 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm8[0],ymm1[0],ymm8[1],ymm1[1],ymm8[2],ymm1[2],ymm8[3],ymm1[3],ymm8[8],ymm1[8],ymm8[9],ymm1[9],ymm8[10],ymm1[10],ymm8[11],ymm1[11]
8627 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
8628 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[12],ymm4[12],ymm2[13],ymm4[13],ymm2[14],ymm4[14],ymm2[15],ymm4[15]
8629 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[1,2,3,3,5,6,7,7]
8630 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
8631 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm17, %zmm0
8632 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm8, %zmm1, %zmm0 {%k1}
8633 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm1
8634 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm1, %ymm8
8635 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,2,3]
8636 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm13
8637 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm13[0],ymm8[1],ymm13[2,3],ymm8[4],ymm13[5,6],ymm8[7]
8638 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm8
8639 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm13 = ymm1[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
8640 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
8641 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm13[0],ymm0[1,2],ymm13[3],ymm0[4,5],ymm13[6],ymm0[7]
8642 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm29 = zmm0[0,1,2,3],zmm8[4,5,6,7]
8643 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm8
8644 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm8[0,0,2,1,4,5,6,7]
8645 ; AVX512DQ-SLOW-NEXT: vpbroadcastq %xmm0, %ymm30
8646 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm31, %xmm0
8647 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm19, %xmm13
8648 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm13[4],xmm0[5],xmm13[5],xmm0[6],xmm13[6],xmm0[7],xmm13[7]
8649 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm8[0,2,2,3,4,5,6,7]
8650 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
8651 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm13[0,0,2,1]
8652 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11]
8653 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = <17,18,17,18,u,u,19,19,5,4,2,2,5,4,6,6>
8654 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm0, %zmm4, %zmm2
8655 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm18, %xmm0
8656 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
8657 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r9), %ymm15
8658 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm13 = ymm15[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
8659 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm13 = ymm13[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
8660 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm13[2,2,2,3]
8661 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[8],ymm5[8],ymm3[9],ymm5[9],ymm3[10],ymm5[10],ymm3[11],ymm5[11]
8662 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm15[2,3,2,3,6,7,6,7]
8663 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm15, %ymm16
8664 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm5[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
8665 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm18 = ymm5[2,1,2,3]
8666 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
8667 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8668 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm0, %zmm2 {%k1}
8669 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm1[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8670 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
8671 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm2, %ymm1
8672 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7]
8673 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
8674 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm26, %xmm0
8675 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8676 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
8677 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5,6],ymm0[7]
8678 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %ymm0
8679 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm0[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
8680 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm0, %ymm15
8681 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm5[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
8682 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm5[2,2,2,3]
8683 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm3
8684 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm2[0,1,2,3],zmm3[4,5,6,7]
8685 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8686 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
8687 ; AVX512DQ-SLOW-NEXT: # xmm2 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8688 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm21, %ymm0
8689 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm11 = ymm11[0],ymm0[0],ymm11[1],ymm0[1],ymm11[2],ymm0[2],ymm11[3],ymm0[3],ymm11[8],ymm0[8],ymm11[9],ymm0[9],ymm11[10],ymm0[10],ymm11[11],ymm0[11]
8690 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm2, %zmm4, %zmm11
8691 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8692 ; AVX512DQ-SLOW-NEXT: vpunpckhwd (%rsp), %xmm0, %xmm2 # 16-byte Folded Reload
8693 ; AVX512DQ-SLOW-NEXT: # xmm2 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8694 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,1,1,1]
8695 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm28, %ymm0
8696 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm27, %ymm3
8697 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[2],ymm3[2],ymm0[3],ymm3[3],ymm0[8],ymm3[8],ymm0[9],ymm3[9],ymm0[10],ymm3[10],ymm0[11],ymm3[11]
8698 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8699 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm11 {%k1}
8700 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm9[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8701 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,2]
8702 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm11, %ymm3
8703 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm9 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7]
8704 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8705 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
8706 ; AVX512DQ-SLOW-NEXT: # xmm2 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8707 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm13 = ymm10[0],ymm12[0],ymm10[1],ymm12[1],ymm10[2],ymm12[2],ymm10[3],ymm12[3],ymm10[8],ymm12[8],ymm10[9],ymm12[9],ymm10[10],ymm12[10],ymm10[11],ymm12[11]
8708 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm2, %zmm4, %zmm13
8709 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8710 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm2 # 16-byte Folded Reload
8711 ; AVX512DQ-SLOW-NEXT: # xmm2 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8712 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,1,1,1]
8713 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm22, %ymm0
8714 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm24, %ymm3
8715 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[2],ymm3[2],ymm0[3],ymm3[3],ymm0[8],ymm3[8],ymm0[9],ymm3[9],ymm0[10],ymm3[10],ymm0[11],ymm3[11]
8716 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
8717 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8718 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm10 # 16-byte Folded Reload
8719 ; AVX512DQ-SLOW-NEXT: # xmm10 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8720 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[8],ymm7[8],ymm6[9],ymm7[9],ymm6[10],ymm7[10],ymm6[11],ymm7[11]
8721 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm10, %zmm4, %zmm5
8722 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8723 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
8724 ; AVX512DQ-SLOW-NEXT: # xmm4 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8725 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[1,1,1,1]
8726 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm20, %ymm0
8727 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm14[0],ymm0[0],ymm14[1],ymm0[1],ymm14[2],ymm0[2],ymm14[3],ymm0[3],ymm14[8],ymm0[8],ymm14[9],ymm0[9],ymm14[10],ymm0[10],ymm14[11],ymm0[11]
8728 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
8729 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm3, %zmm2, %zmm13 {%k1}
8730 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm6, %zmm4, %zmm5 {%k1}
8731 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8732 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 32-byte Folded Reload
8733 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm26 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
8734 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm2 # 64-byte Folded Reload
8735 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8736 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm3 # 32-byte Folded Reload
8737 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm3 # 64-byte Folded Reload
8738 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
8739 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 32-byte Folded Reload
8740 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8741 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm7
8742 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
8743 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0],ymm7[1],ymm11[2,3],ymm7[4],ymm11[5,6],ymm7[7]
8744 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm15[2,3,2,3,6,7,6,7]
8745 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm15, %ymm6
8746 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm7[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
8747 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm21 = ymm7[2,1,2,3]
8748 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm9
8749 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm20 = zmm10[0,1,2,3],zmm9[4,5,6,7]
8750 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm13, %ymm10
8751 ; AVX512DQ-SLOW-NEXT: vpshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
8752 ; AVX512DQ-SLOW-NEXT: # ymm11 = mem[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8753 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,2]
8754 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm10[0,1],ymm11[2],ymm10[3,4],ymm11[5],ymm10[6,7]
8755 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %ymm10
8756 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8757 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm12
8758 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
8759 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0],ymm12[1],ymm13[2,3],ymm12[4],ymm13[5,6],ymm12[7]
8760 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm13 = ymm10[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
8761 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm13 = ymm13[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
8762 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm13[2,2,2,3]
8763 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm11
8764 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm12[0,1,2,3],zmm11[4,5,6,7]
8765 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm5, %ymm12
8766 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm25, %ymm0
8767 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm0[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8768 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,2]
8769 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1],ymm14[2],ymm12[3,4],ymm14[5],ymm12[6,7]
8770 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm10[2,3,2,3,6,7,6,7]
8771 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm14[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
8772 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,2,3]
8773 ; AVX512DQ-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8774 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm1
8775 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm9
8776 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
8777 ; AVX512DQ-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0],ymm1[1],ymm5[2,3],ymm1[4],ymm5[5,6],ymm1[7]
8778 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm9[0,2,2,3,4,5,6,7,8,10,10,11,12,13,14,15]
8779 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm5[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
8780 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
8781 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm0, %zmm12
8782 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm1[0,1,2,3],zmm12[4,5,6,7]
8783 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm9[2,3,2,3,6,7,6,7]
8784 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,2,2,1,4,5,6,7,8,10,10,9,12,13,14,15]
8785 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,2,3]
8786 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,3,2,3]
8787 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,2,2,1,4,5,6,7]
8788 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
8789 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8790 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,2]
8791 ; AVX512DQ-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
8792 ; AVX512DQ-SLOW-NEXT: # xmm15 = mem[2,3,2,3]
8793 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[0,2,2,1,4,5,6,7]
8794 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,1,0,1]
8795 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm16, %ymm0
8796 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8797 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
8798 ; AVX512DQ-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
8799 ; AVX512DQ-SLOW-NEXT: # xmm7 = mem[2,3,2,3]
8800 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,2,2,1,4,5,6,7]
8801 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
8802 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm6[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8803 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,2]
8804 ; AVX512DQ-SLOW-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8805 ; AVX512DQ-SLOW-NEXT: # xmm13 = mem[2,3,2,3]
8806 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,2,2,1,4,5,6,7]
8807 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
8808 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm10 = ymm10[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8809 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,2]
8810 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm4 # 64-byte Folded Reload
8811 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm31, %zmm30, %zmm25
8812 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm25 # 64-byte Folded Reload
8813 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm23, %zmm6
8814 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm18 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
8815 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm6 # 64-byte Folded Reload
8816 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm17, %zmm16
8817 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm16 # 64-byte Folded Reload
8818 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm24, %zmm14
8819 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm18, %zmm14 # 64-byte Folded Reload
8820 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm5, %zmm5
8821 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm29, %zmm18, %zmm5
8822 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
8823 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
8824 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm19, %zmm9, %zmm8
8825 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm15, %zmm0
8826 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm20, %zmm9, %zmm0
8827 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm7, %zmm7
8828 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm22, %zmm9, %zmm7
8829 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm13, %zmm10
8830 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm9, %zmm10
8831 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8832 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 256(%rax)
8833 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 448(%rax)
8834 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 640(%rax)
8835 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 64(%rax)
8836 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, (%rax)
8837 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 192(%rax)
8838 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 384(%rax)
8839 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 576(%rax)
8840 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 128(%rax)
8841 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm14, 320(%rax)
8842 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 512(%rax)
8843 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm6, 704(%rax)
8844 ; AVX512DQ-SLOW-NEXT: addq $936, %rsp # imm = 0x3A8
8845 ; AVX512DQ-SLOW-NEXT: vzeroupper
8846 ; AVX512DQ-SLOW-NEXT: retq
;
8848 ; AVX512DQ-FAST-LABEL: store_i16_stride6_vf64:
8849 ; AVX512DQ-FAST: # %bb.0:
8850 ; AVX512DQ-FAST-NEXT: subq $1224, %rsp # imm = 0x4C8
8851 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm1
8852 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8853 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm0
8854 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8855 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11]
8856 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm1
8857 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8858 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %xmm4
8859 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8860 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rcx), %xmm7
8861 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8862 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm2
8863 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8864 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %xmm5
8865 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8866 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm3
8867 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8868 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
8869 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8870 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rcx), %ymm2
8871 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdx), %ymm6
8872 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm6[0],ymm2[0],ymm6[1],ymm2[1],ymm6[2],ymm2[2],ymm6[3],ymm2[3],ymm6[8],ymm2[8],ymm6[9],ymm2[9],ymm6[10],ymm2[10],ymm6[11],ymm2[11]
8873 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm6, %ymm30
8874 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm31
8875 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rcx), %xmm2
8876 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8877 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdx), %xmm6
8878 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8879 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm6[4],xmm2[4],xmm6[5],xmm2[5],xmm6[6],xmm2[6],xmm6[7],xmm2[7]
8880 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm6
8881 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rcx), %ymm1
8882 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8883 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %ymm8
8884 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm8[0],ymm1[0],ymm8[1],ymm1[1],ymm8[2],ymm1[2],ymm8[3],ymm1[3],ymm8[8],ymm1[8],ymm8[9],ymm1[9],ymm8[10],ymm1[10],ymm8[11],ymm1[11]
8885 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm8, %ymm28
8886 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
8887 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm17
8888 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %ymm2
8889 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8890 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %ymm1
8891 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8892 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[1],ymm2[1],ymm1[2],ymm2[2],ymm1[3],ymm2[3],ymm1[8],ymm2[8],ymm1[9],ymm2[9],ymm1[10],ymm2[10],ymm1[11],ymm2[11]
8893 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
8894 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm16
8895 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = [1,1,1,1,10,10,10,11]
8896 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm2
8897 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8898 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm1
8899 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8900 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm1[0],ymm2[0],ymm1[1],ymm2[1],ymm1[2],ymm2[2],ymm1[3],ymm2[3],ymm1[8],ymm2[8],ymm1[9],ymm2[9],ymm1[10],ymm2[10],ymm1[11],ymm2[11]
8901 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm2
8902 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8903 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm3
8904 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8905 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
8906 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm18, %zmm3
8907 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = <1,2,1,2,u,u,3,3,13,12,10,10,13,12,14,14>
8908 ; AVX512DQ-FAST-NEXT: vpermd %zmm0, %zmm20, %zmm22
8909 ; AVX512DQ-FAST-NEXT: movw $18724, %ax # imm = 0x4924
8910 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
8911 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm3, %zmm22 {%k1}
8912 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm25 = [8,9,20,11,12,21,14,15]
8913 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm22, %zmm1
8914 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm0
8915 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8916 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8917 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm0, %zmm25, %zmm1
8918 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8919 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm19 = [0,9,2,3,8,5,6,11]
8920 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm0
8921 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8922 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm15 = [12,13,10,11,10,11,14,15,14,15,14,15,14,15,14,15]
8923 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm0, %xmm0
8924 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm0, %ymm19, %ymm22
8925 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = <0,u,0,1,u,10,10,u>
8926 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm0
8927 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8928 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8929 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm1
8930 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8931 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,12,13,12,13,10,11,8,9,10,11,12,13,14,15]
8932 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm1
8933 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm23, %zmm1
8934 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8935 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rsi), %ymm9
8936 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdi), %ymm5
8937 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm5[0],ymm9[0],ymm5[1],ymm9[1],ymm5[2],ymm9[2],ymm5[3],ymm9[3],ymm5[8],ymm9[8],ymm5[9],ymm9[9],ymm5[10],ymm9[10],ymm5[11],ymm9[11]
8938 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rsi), %xmm0
8939 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdi), %xmm14
8940 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
8941 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm0, %xmm21
8942 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm4
8943 ; AVX512DQ-FAST-NEXT: vpermd %zmm6, %zmm20, %zmm26
8944 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm4, %zmm26 {%k1}
8945 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm26, %zmm0
8946 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r8), %ymm10
8947 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm10[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8948 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm3, %zmm25, %zmm0
8949 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8950 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r8), %xmm0
8951 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm0, %xmm3
8952 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm0, %xmm24
8953 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm3, %ymm19, %ymm26
8954 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r9), %ymm11
8955 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm4 = ymm11[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8956 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r9), %xmm0
8957 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8958 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm0
8959 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm23, %zmm0
8960 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8961 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %xmm0
8962 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8963 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %xmm1
8964 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8965 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %ymm13
8966 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %ymm12
8967 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11]
8968 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
8969 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm18, %zmm6
8970 ; AVX512DQ-FAST-NEXT: vpermd %zmm17, %zmm20, %zmm17
8971 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm6, %zmm17 {%k1}
8972 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, %zmm0
8973 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r8), %ymm6
8974 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm7 = ymm6[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
8975 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm7, %zmm25, %zmm0
8976 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8977 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r8), %xmm0
8978 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8979 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm0, %xmm7
8980 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm7, %ymm19, %ymm17
8981 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %xmm1
8982 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8983 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %ymm7
8984 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm7[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
8985 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm1
8986 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm23, %zmm1
8987 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8988 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %xmm3
8989 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8990 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %xmm1
8991 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8992 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %ymm2
8993 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8994 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %ymm0
8995 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8996 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[8],ymm2[8],ymm0[9],ymm2[9],ymm0[10],ymm2[10],ymm0[11],ymm2[11]
8997 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
8998 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm18, %zmm0
8999 ; AVX512DQ-FAST-NEXT: vpermd %zmm16, %zmm20, %zmm20
9000 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm0, %zmm20 {%k1}
9001 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %ymm0
9002 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9003 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,1,1,3,4,5,6,7,8,9,9,11,12,13,14,15]
9004 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm0, %zmm20, %zmm25
9005 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %xmm0
9006 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
9007 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm0, %xmm0
9008 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm0, %ymm19, %ymm20
9009 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %xmm0
9010 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9011 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm1
9012 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %ymm0
9013 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9014 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[0,0,2,1,4,5,6,7,8,8,10,9,12,13,14,15]
9015 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm23, %zmm1
9016 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9017 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,4,5,10,11,u,u,u,u,u,u,u,u,24,25,22,23,20,21,26,27,u,u,u,u,u,u,u,u>
9018 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm9, %ymm0
9019 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm5, %ymm1
9020 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11]
9021 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm5[4],ymm9[4],ymm5[5],ymm9[5],ymm5[6],ymm9[6],ymm5[7],ymm9[7],ymm5[12],ymm9[12],ymm5[13],ymm9[13],ymm5[14],ymm9[14],ymm5[15],ymm9[15]
9022 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = [2,1,2,3,11,11,11,11]
9023 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm29, %zmm8
9024 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm27 = [5,6,5,6,5,6,7,7]
9025 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm2
9026 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm31, %ymm1
9027 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm0 = ymm2[4],ymm1[4],ymm2[5],ymm1[5],ymm2[6],ymm1[6],ymm2[7],ymm1[7],ymm2[12],ymm1[12],ymm2[13],ymm1[13],ymm2[14],ymm1[14],ymm2[15],ymm1[15]
9028 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm27, %ymm0
9029 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm1 = ymm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm1[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9030 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm2 = ymm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm2[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9031 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[8],ymm1[8],ymm2[9],ymm1[9],ymm2[10],ymm1[10],ymm2[11],ymm1[11]
9032 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
9033 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm0, %zmm1, %zmm8 {%k1}
9034 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm23 = [8,21,10,11,20,13,14,23]
9035 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, %zmm0
9036 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,u,u,14,15,14,15,14,15,14,15,28,29,26,27,26,27,30,31,30,31,30,31,30,31,30,31>
9037 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm10, %ymm1
9038 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm2, %ymm9
9039 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm1, %zmm23, %zmm0
9040 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm10[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
9041 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm30 = [12,1,2,13,4,5,14,7]
9042 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm1, %ymm30, %ymm8
9043 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm8[0,1,2,3],zmm0[0,1,2,3]
9044 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9045 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
9046 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm0
9047 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm1, %ymm8
9048 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25,16,17,20,21,20,21,22,23,24,25,24,25,24,25,24,25]
9049 ; AVX512DQ-FAST-NEXT: # ymm2 = mem[0,1,0,1]
9050 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm11, %ymm1
9051 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm2, %ymm11
9052 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = <2,2,u,3,10,u,10,11>
9053 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm31, %zmm1
9054 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm13, %ymm0
9055 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm12, %ymm2
9056 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[8],ymm0[8],ymm2[9],ymm0[9],ymm2[10],ymm0[10],ymm2[11],ymm0[11]
9057 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15]
9058 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm29, %zmm0
9059 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm28, %ymm4
9060 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9061 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm2 = ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15]
9062 ; AVX512DQ-FAST-NEXT: vpermd %ymm2, %ymm27, %ymm2
9063 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm3 = ymm3[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm3[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9064 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm4 = ymm4[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm4[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9065 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11]
9066 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,2]
9067 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm2, %zmm3, %zmm0 {%k1}
9068 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, %zmm2
9069 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm6, %ymm3
9070 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm9, %ymm10
9071 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm3, %zmm23, %zmm2
9072 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm3 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
9073 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm3, %ymm30, %ymm0
9074 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm2[0,1,2,3]
9075 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9076 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm0
9077 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm7, %ymm2
9078 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm31, %zmm2
9079 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,6,7,8,9,6,7,4,5,10,11]
9080 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm4
9081 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm4, %xmm0
9082 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm14, %xmm3
9083 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
9084 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
9085 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = [0,0,2,1,8,9,8,9]
9086 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm19, %zmm3
9087 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [1,0,2,2,1,0,2,2]
9088 ; AVX512DQ-FAST-NEXT: # ymm0 = mem[0,1,0,1]
9089 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9090 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9091 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9092 ; AVX512DQ-FAST-NEXT: vpermd %ymm4, %ymm0, %ymm4
9093 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9094 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm6 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9095 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9096 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm5, %ymm5
9097 ; AVX512DQ-FAST-NEXT: movw $9362, %ax # imm = 0x2492
9098 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k2
9099 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm5, %zmm4, %zmm3 {%k2}
9100 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm28 = [16,9,10,17,12,13,18,15]
9101 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm4
9102 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm24, %xmm6
9103 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[2,1,3,3,4,5,6,7]
9104 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm5, %zmm28, %zmm4
9105 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm5 = xmm24[0],zero,xmm24[1],zero,xmm24[2],zero,xmm24[3],zero
9106 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm16 = [0,1,8,3,4,9,6,7]
9107 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm5, %ymm16, %ymm3
9108 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm4[0,1,2,3]
9109 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9110 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,4,5,4,5,6,7,8,9,8,9,8,9,8,9]
9111 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9112 ; AVX512DQ-FAST-NEXT: vpshufb %xmm4, %xmm5, %xmm3
9113 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm4, %xmm8
9114 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm7 = xmm5[0,0,2,1,4,5,6,7]
9115 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm18 = <u,0,0,u,8,8,u,9>
9116 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm7
9117 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9118 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm5, %xmm3
9119 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9120 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm6, %xmm4
9121 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
9122 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9123 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm19, %zmm4
9124 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9125 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9126 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9127 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9128 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm6 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9129 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9130 ; AVX512DQ-FAST-NEXT: vpermd %ymm3, %ymm0, %ymm3
9131 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm5, %ymm5
9132 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm5, %zmm3, %zmm4 {%k2}
9133 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, %zmm3
9134 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9135 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[2,1,3,3,4,5,6,7]
9136 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm5, %zmm28, %zmm3
9137 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm5 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
9138 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm5, %ymm16, %ymm4
9139 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm12 = zmm4[0,1,2,3],zmm3[0,1,2,3]
9140 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9141 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm3
9142 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm8, %xmm21
9143 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,0,2,1,4,5,6,7]
9144 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm18, %zmm4
9145 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
9146 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm8, %ymm3
9147 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9148 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm5
9149 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[8],ymm3[8],ymm5[9],ymm3[9],ymm5[10],ymm3[10],ymm5[11],ymm3[11]
9150 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm6[4],ymm8[4],ymm6[5],ymm8[5],ymm6[6],ymm8[6],ymm6[7],ymm8[7],ymm6[12],ymm8[12],ymm6[13],ymm8[13],ymm6[14],ymm8[14],ymm6[15],ymm8[15]
9151 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm29, %zmm3
9152 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9153 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
9154 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm5 = ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15]
9155 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm6 = ymm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm6[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9156 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm8 = ymm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm8[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9157 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11]
9158 ; AVX512DQ-FAST-NEXT: vpermd %ymm5, %ymm27, %ymm5
9159 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,2]
9160 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm5, %zmm6, %zmm3 {%k1}
9161 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm6
9162 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
9163 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm10, %ymm14
9164 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm8, %ymm5
9165 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm5, %zmm23, %zmm6
9166 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm5 = ymm8[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
9167 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm5, %ymm30, %ymm3
9168 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
9169 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm13, %ymm5
9170 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
9171 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm10, %ymm8
9172 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm8[0],ymm5[0],ymm8[1],ymm5[1],ymm8[2],ymm5[2],ymm8[3],ymm5[3],ymm8[8],ymm5[8],ymm8[9],ymm5[9],ymm8[10],ymm5[10],ymm8[11],ymm5[11]
9173 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm10[4],ymm13[4],ymm10[5],ymm13[5],ymm10[6],ymm13[6],ymm10[7],ymm13[7],ymm10[12],ymm13[12],ymm10[13],ymm13[13],ymm10[14],ymm13[14],ymm10[15],ymm13[15]
9174 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm29, %zmm5
9175 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm29 = zmm3[0,1,2,3],zmm6[0,1,2,3]
9176 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9177 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,u,u,8,9,10,11,12,13,14,15,24,25,28,29,28,29,26,27,24,25,26,27,28,29,30,31>
9178 ; AVX512DQ-FAST-NEXT: vpshufb %ymm13, %ymm3, %ymm8
9179 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm11, %ymm15
9180 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm3, %ymm3
9181 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm8, %zmm31, %zmm3
9182 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9183 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
9184 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} ymm8 = ymm11[4],ymm6[4],ymm11[5],ymm6[5],ymm11[6],ymm6[6],ymm11[7],ymm6[7],ymm11[12],ymm6[12],ymm11[13],ymm6[13],ymm11[14],ymm6[14],ymm11[15],ymm6[15]
9185 ; AVX512DQ-FAST-NEXT: vpermd %ymm8, %ymm27, %ymm8
9186 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm10 = ymm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm6[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9187 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} ymm11 = ymm11[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,ymm11[22,23,24,25,26,27,28,29,30,31],zero,zero,zero,zero,zero,zero
9188 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm10 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[8],ymm10[8],ymm11[9],ymm10[9],ymm11[10],ymm10[10],ymm11[11],ymm10[11]
9189 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,2]
9190 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm8, %zmm10, %zmm5 {%k1}
9191 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9192 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm6, %ymm8
9193 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm8, %zmm5, %zmm23
9194 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm8 = ymm6[2,1,3,3,4,5,6,7,10,9,11,11,12,13,14,15]
9195 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm8, %ymm30, %ymm5
9196 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9197 ; AVX512DQ-FAST-NEXT: vpshufb %ymm13, %ymm6, %ymm10
9198 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm8
9199 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm31, %zmm8
9200 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9201 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm6, %xmm10
9202 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
9203 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm13, %xmm11
9204 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
9205 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
9206 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm19, %zmm10
9207 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9208 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9209 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9210 ; AVX512DQ-FAST-NEXT: vpermd %ymm11, %ymm0, %ymm11
9211 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm13 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9212 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm14 = xmm14[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9213 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
9214 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm13, %ymm13
9215 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm13, %zmm11, %zmm10 {%k2}
9216 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, %zmm11
9217 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsp), %xmm15 # 16-byte Reload
9218 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm13 = xmm15[2,1,3,3,4,5,6,7]
9219 ; AVX512DQ-FAST-NEXT: vpermt2d %zmm13, %zmm28, %zmm11
9220 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9221 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm14, %xmm13
9222 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9223 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm6, %xmm9
9224 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
9225 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm6[0],xmm14[0],xmm6[1],xmm14[1],xmm6[2],xmm14[2],xmm6[3],xmm14[3]
9226 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm19, %zmm13
9227 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm9 = xmm15[0],zero,xmm15[1],zero,xmm15[2],zero,xmm15[3],zero
9228 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
9229 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9230 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm6[0],xmm15[0],xmm6[1],xmm15[1],xmm6[2],xmm15[2],xmm6[3],xmm15[3]
9231 ; AVX512DQ-FAST-NEXT: vpermd %ymm14, %ymm0, %ymm0
9232 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm14 = xmm15[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9233 ; AVX512DQ-FAST-NEXT: vpsrldq {{.*#+}} xmm15 = xmm6[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
9234 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9235 ; AVX512DQ-FAST-NEXT: vpbroadcastq %xmm14, %ymm14
9236 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm14, %zmm0, %zmm13 {%k2}
9237 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9238 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm6[2,1,3,3,4,5,6,7]
9239 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm0, %zmm13, %zmm28
9240 ; AVX512DQ-FAST-NEXT: vpmovzxwd {{.*#+}} xmm0 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
9241 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm9, %ymm16, %ymm10
9242 ; AVX512DQ-FAST-NEXT: vpermt2d %ymm0, %ymm16, %ymm13
9243 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm5[0,1,2,3],zmm23[0,1,2,3]
9244 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm5 = zmm10[0,1,2,3],zmm11[0,1,2,3]
9245 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9246 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm11
9247 ; AVX512DQ-FAST-NEXT: vpshufb %xmm11, %xmm6, %xmm9
9248 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm10 = xmm6[0,0,2,1,4,5,6,7]
9249 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm18, %zmm10
9250 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm13[0,1,2,3],zmm28[0,1,2,3]
9251 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9252 ; AVX512DQ-FAST-NEXT: vpshufb %xmm11, %xmm6, %xmm11
9253 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm13 = xmm6[0,0,2,1,4,5,6,7]
9254 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm11, %zmm18, %zmm13
9255 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535]
9256 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm9, %zmm11, %zmm13
9257 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
9258 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, (%rax)
9259 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm5, %zmm11, %zmm10
9260 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 192(%rax)
9261 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0]
9262 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm5, %zmm8
9263 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm8, 128(%rax)
9264 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm29, %zmm5, %zmm3
9265 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 320(%rax)
9266 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm20[0,1,2,3],zmm25[0,1,2,3]
9267 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
9268 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9269 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm6
9270 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 256(%rax)
9271 ; AVX512DQ-FAST-NEXT: vshufi64x2 $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm0 # 64-byte Folded Reload
9272 ; AVX512DQ-FAST-NEXT: # zmm0 = zmm17[0,1,2,3],mem[0,1,2,3]
9273 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
9274 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm6
9275 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 448(%rax)
9276 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm12, %zmm11, %zmm4
9277 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 384(%rax)
9278 ; AVX512DQ-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm7 # 64-byte Folded Reload
9279 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 576(%rax)
9280 ; AVX512DQ-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm2 # 64-byte Folded Reload
9281 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 512(%rax)
9282 ; AVX512DQ-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm1 # 64-byte Folded Reload
9283 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 704(%rax)
9284 ; AVX512DQ-FAST-NEXT: vshufi64x2 $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm0 # 64-byte Folded Reload
9285 ; AVX512DQ-FAST-NEXT: # zmm0 = zmm26[0,1,2,3],mem[0,1,2,3]
9286 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9287 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm1
9288 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 640(%rax)
9289 ; AVX512DQ-FAST-NEXT: vshufi64x2 $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm22, %zmm0 # 64-byte Folded Reload
9290 ; AVX512DQ-FAST-NEXT: # zmm0 = zmm22[0,1,2,3],mem[0,1,2,3]
9291 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9292 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm1
9293 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 64(%rax)
9294 ; AVX512DQ-FAST-NEXT: addq $1224, %rsp # imm = 0x4C8
9295 ; AVX512DQ-FAST-NEXT: vzeroupper
9296 ; AVX512DQ-FAST-NEXT: retq
9297 ;
9298 ; AVX512BW-LABEL: store_i16_stride6_vf64:
9299 ; AVX512BW: # %bb.0:
9300 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm3
9301 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm0
9302 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm16
9303 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm24
9304 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm1
9305 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm7
9306 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm12
9307 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm15
9308 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [0,0,0,32,3,35,0,0,1,33,4,36,0,0,2,34,0,0,0,32,3,35,0,0,1,33,4,36,0,0,2,34]
9309 ; AVX512BW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
9310 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
9311 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm19, %zmm13
9312 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm26 = [0,32,3,35,0,0,1,33,4,36,0,0,2,34,5,37,0,32,3,35,0,0,1,33,4,36,0,0,2,34,5,37]
9313 ; AVX512BW-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3]
9314 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm2
9315 ; AVX512BW-NEXT: vpermt2w %zmm16, %zmm26, %zmm2
9316 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0]
9317 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
9318 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm20
9319 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm11, %zmm20
9320 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61,0,0,27,59,30,62,0,0,28,60,31,63,0,0,29,61]
9321 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
9322 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm4
9323 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm5, %zmm4
9324 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [24,56,0,0,22,54,25,57,0,0,23,55,26,58,0,0,24,56,0,0,22,54,25,57,0,0,23,55,26,58,0,0]
9325 ; AVX512BW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
9326 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm22
9327 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm18, %zmm22
9328 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [21,53,24,56,0,0,22,54,25,57,0,0,23,55,26,58,21,53,24,56,0,0,22,54,25,57,0,0,23,55,26,58]
9329 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
9330 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm6
9331 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm8, %zmm6
9332 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,16,48,19,51,0,0,17,49,20,52,0,0,18,50,0,0,16,48,19,51,0,0,17,49,20,52,0,0,18,50]
9333 ; AVX512BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
9334 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm23
9335 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm21, %zmm23
9336 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [16,48,19,51,0,0,17,49,20,52,0,0,18,50,21,53,16,48,19,51,0,0,17,49,20,52,0,0,18,50,21,53]
9337 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
9338 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm9
9339 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm10, %zmm9
9340 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm17 = [0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45]
9341 ; AVX512BW-NEXT: # zmm17 = mem[0,1,2,3,0,1,2,3]
9342 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm14
9343 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm17, %zmm14
9344 ; AVX512BW-NEXT: vmovdqa64 %zmm0, %zmm25
9345 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm26, %zmm0
9346 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm26 = [8,40,0,0,6,38,9,41,0,0,7,39,10,42,0,0,8,40,0,0,6,38,9,41,0,0,7,39,10,42,0,0]
9347 ; AVX512BW-NEXT: # zmm26 = mem[0,1,2,3,0,1,2,3]
9348 ; AVX512BW-NEXT: vpermt2w %zmm24, %zmm26, %zmm25
9349 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0,13,45,0,0,11,43,14,46,0,0,12,44,15,47,0,0]
9350 ; AVX512BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
9351 ; AVX512BW-NEXT: vpermi2w %zmm16, %zmm3, %zmm5
9352 ; AVX512BW-NEXT: vpermi2w %zmm16, %zmm3, %zmm18
9353 ; AVX512BW-NEXT: vpermi2w %zmm16, %zmm3, %zmm10
9354 ; AVX512BW-NEXT: vpermi2w %zmm16, %zmm3, %zmm17
9355 ; AVX512BW-NEXT: vpermt2w %zmm16, %zmm26, %zmm3
9356 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm26
9357 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm24, %zmm26
9358 ; AVX512BW-NEXT: vmovdqa64 %zmm7, %zmm16
9359 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm19, %zmm7
9360 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [5,37,8,40,0,0,6,38,9,41,0,0,7,39,10,42,5,37,8,40,0,0,6,38,9,41,0,0,7,39,10,42]
9361 ; AVX512BW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
9362 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm19, %zmm16
9363 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm1, %zmm11
9364 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm1, %zmm8
9365 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm1, %zmm21
9366 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm1, %zmm24
9367 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm19, %zmm1
9368 ; AVX512BW-NEXT: movw $9362, %ax # imm = 0x2492
9369 ; AVX512BW-NEXT: kmovd %eax, %k2
9370 ; AVX512BW-NEXT: vmovdqa32 %zmm13, %zmm2 {%k2}
9371 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm12
9372 ; AVX512BW-NEXT: movw $18724, %ax # imm = 0x4924
9373 ; AVX512BW-NEXT: kmovd %eax, %k1
9374 ; AVX512BW-NEXT: vmovdqa32 %zmm20, %zmm4 {%k1}
9375 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm13
9376 ; AVX512BW-NEXT: vmovdqa32 %zmm22, %zmm6 {%k1}
9377 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,2,3,32,u,6,7,8,9,33,u,12,13,14,15,34,u,18,19,20,21,35,u,24,25,26,27,36,u,30,31>
9378 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm15, %zmm2
9379 ; AVX512BW-NEXT: vmovdqa32 %zmm23, %zmm9 {%k2}
9380 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm19 = <58,u,2,3,4,5,59,u,8,9,10,11,60,u,14,15,16,17,61,u,20,21,22,23,62,u,26,27,28,29,63,u>
9381 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm19, %zmm4
9382 ; AVX512BW-NEXT: vmovdqa32 %zmm26, %zmm14 {%k1}
9383 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm20 = <0,1,53,u,4,5,6,7,54,u,10,11,12,13,55,u,16,17,18,19,56,u,22,23,24,25,57,u,28,29,30,31>
9384 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm20, %zmm6
9385 ; AVX512BW-NEXT: vmovdqa32 %zmm25, %zmm16 {%k1}
9386 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm22 = <0,1,2,3,48,u,6,7,8,9,49,u,12,13,14,15,50,u,18,19,20,21,51,u,24,25,26,27,52,u,30,31>
9387 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm22, %zmm9
9388 ; AVX512BW-NEXT: vmovdqa32 %zmm7, %zmm0 {%k2}
9389 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <42,u,2,3,4,5,43,u,8,9,10,11,44,u,14,15,16,17,45,u,20,21,22,23,46,u,26,27,28,29,47,u>
9390 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm7, %zmm14
9391 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm15, %zmm0
9392 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,37,u,4,5,6,7,38,u,10,11,12,13,39,u,16,17,18,19,40,u,22,23,24,25,41,u,28,29,30,31>
9393 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm15, %zmm16
9394 ; AVX512BW-NEXT: vmovdqa32 %zmm11, %zmm5 {%k1}
9395 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm19, %zmm5
9396 ; AVX512BW-NEXT: vmovdqa32 %zmm18, %zmm8 {%k1}
9397 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm20, %zmm8
9398 ; AVX512BW-NEXT: vmovdqa32 %zmm21, %zmm10 {%k2}
9399 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm11
9400 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm22, %zmm10
9401 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm13
9402 ; AVX512BW-NEXT: vmovdqa32 %zmm24, %zmm17 {%k1}
9403 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm18 = [0,1,2,3,4,32,6,7,8,9,10,33,12,13,14,15,16,34,18,19,20,21,22,35,24,25,26,27,28,36,30,31]
9404 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm18, %zmm2
9405 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm7, %zmm17
9406 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [0,58,2,3,4,5,6,59,8,9,10,11,12,60,14,15,16,17,18,61,20,21,22,23,24,62,26,27,28,29,30,63]
9407 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm7, %zmm4
9408 ; AVX512BW-NEXT: vmovdqa32 %zmm3, %zmm1 {%k1}
9409 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,2,53,4,5,6,7,8,54,10,11,12,13,14,55,16,17,18,19,20,56,22,23,24,25,26,57,28,29,30,31]
9410 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm3, %zmm6
9411 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm15, %zmm1
9412 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [0,1,2,3,4,48,6,7,8,9,10,49,12,13,14,15,16,50,18,19,20,21,22,51,24,25,26,27,28,52,30,31]
9413 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm12, %zmm9
9414 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm15 = [0,42,2,3,4,5,6,43,8,9,10,11,12,44,14,15,16,17,18,45,20,21,22,23,24,46,26,27,28,29,30,47]
9415 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm15, %zmm14
9416 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm18, %zmm0
9417 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm18 = [0,1,2,37,4,5,6,7,8,38,10,11,12,13,14,39,16,17,18,19,20,40,22,23,24,25,26,41,28,29,30,31]
9418 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm18, %zmm16
9419 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm7, %zmm5
9420 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm3, %zmm8
9421 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm12, %zmm10
9422 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm15, %zmm17
9423 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm18, %zmm1
9424 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9425 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rax)
9426 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 128(%rax)
9427 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 192(%rax)
9428 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 256(%rax)
9429 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rax)
9430 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 384(%rax)
9431 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 448(%rax)
9432 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 512(%rax)
9433 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rax)
9434 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 640(%rax)
9435 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 704(%rax)
9436 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%rax)
9437 ; AVX512BW-NEXT: vzeroupper
9438 ; AVX512BW-NEXT: retq
9439 %in.vec0 = load <64 x i16>, ptr %in.vecptr0, align 64
9440 %in.vec1 = load <64 x i16>, ptr %in.vecptr1, align 64
9441 %in.vec2 = load <64 x i16>, ptr %in.vecptr2, align 64
9442 %in.vec3 = load <64 x i16>, ptr %in.vecptr3, align 64
9443 %in.vec4 = load <64 x i16>, ptr %in.vecptr4, align 64
9444 %in.vec5 = load <64 x i16>, ptr %in.vecptr5, align 64
9445 %1 = shufflevector <64 x i16> %in.vec0, <64 x i16> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
9446 %2 = shufflevector <64 x i16> %in.vec2, <64 x i16> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
9447 %3 = shufflevector <64 x i16> %in.vec4, <64 x i16> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
9448 %4 = shufflevector <128 x i16> %1, <128 x i16> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
9449 %5 = shufflevector <128 x i16> %3, <128 x i16> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
9450 %6 = shufflevector <256 x i16> %4, <256 x i16> %5, <384 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383>
9451 %interleaved.vec = shufflevector <384 x i16> %6, <384 x i16> poison, <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383>
9452 store <384 x i16> %interleaved.vec, ptr %out.vec, align 64
9453 ret void
9454 }
9455 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
9461 ; AVX512-FAST: {{.*}}
9462 ; AVX512-SLOW: {{.*}}
9463 ; AVX512BW-FAST: {{.*}}
9464 ; AVX512BW-ONLY-FAST: {{.*}}
9465 ; AVX512BW-ONLY-SLOW: {{.*}}
9466 ; AVX512BW-SLOW: {{.*}}
9467 ; AVX512DQBW-FAST: {{.*}}
9468 ; AVX512DQBW-SLOW: {{.*}}
9472 ; FALLBACK10: {{.*}}
9473 ; FALLBACK11: {{.*}}
9474 ; FALLBACK12: {{.*}}